OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/mark-compact.h" | 5 #include "src/heap/mark-compact.h" |
6 | 6 |
7 #include "src/base/atomicops.h" | 7 #include "src/base/atomicops.h" |
8 #include "src/base/bits.h" | 8 #include "src/base/bits.h" |
9 #include "src/base/sys-info.h" | 9 #include "src/base/sys-info.h" |
10 #include "src/code-stubs.h" | 10 #include "src/code-stubs.h" |
(...skipping 25 matching lines...) |
36 | 36 |
37 const char* Marking::kWhiteBitPattern = "00"; | 37 const char* Marking::kWhiteBitPattern = "00"; |
38 const char* Marking::kBlackBitPattern = "11"; | 38 const char* Marking::kBlackBitPattern = "11"; |
39 const char* Marking::kGreyBitPattern = "10"; | 39 const char* Marking::kGreyBitPattern = "10"; |
40 const char* Marking::kImpossibleBitPattern = "01"; | 40 const char* Marking::kImpossibleBitPattern = "01"; |
41 | 41 |
42 // The following has to hold in order for {ObjectMarking::MarkBitFrom} to not | 42 // The following has to hold in order for {ObjectMarking::MarkBitFrom} to not |
43 // produce invalid {kImpossibleBitPattern} in the marking bitmap by overlapping. | 43 // produce invalid {kImpossibleBitPattern} in the marking bitmap by overlapping. |
44 STATIC_ASSERT(Heap::kMinObjectSizeInWords >= 2); | 44 STATIC_ASSERT(Heap::kMinObjectSizeInWords >= 2); |
45 | 45 |
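[Note] The STATIC_ASSERT above encodes the two-bit color scheme declared just before it (white "00", grey "10", black "11", "01" impossible): assuming one mark bit per heap word, an object's color occupies two adjacent bits, so objects must span at least two words or neighbouring bit pairs would overlap and could read back as the impossible pattern. The toy sketch below walks through that argument; the `bitmap`, `ColorOf`, and `MarkGrey` names are illustrative assumptions, not V8's actual Bitmap/ObjectMarking API.

// Toy model of the two-bit marking scheme (illustrative only; the names and
// layout here are assumptions, not V8's real Bitmap/ObjectMarking code).
// One mark bit per heap word; an object's color is the bit pair starting at
// its first word: white = "00", grey = "10", black = "11", "01" impossible.
#include <bitset>
#include <cassert>
#include <cstddef>
#include <string>

static std::bitset<64> bitmap;  // one bit per word of a small fake heap

// Color of the object whose first word has index `word`:
// first bit = bitmap[word], second bit = bitmap[word + 1].
static std::string ColorOf(size_t word) {
  bool first = bitmap[word];
  bool second = bitmap[word + 1];
  if (first && second) return "11";  // black
  if (first) return "10";            // grey
  if (second) return "01";           // impossible
  return "00";                       // white
}

static void MarkGrey(size_t word) { bitmap[word] = true; }

int main() {
  // If objects could be a single word, an object at word 0 and a neighbour at
  // word 1 would share bit 1: marking the neighbour grey makes the object at
  // word 0 read back as the impossible pattern "01".
  MarkGrey(1);
  assert(ColorOf(0) == "01");

  // With a minimum object size of two words the neighbour starts at word 2,
  // so the two bit pairs never overlap -- the property the STATIC_ASSERT
  // above guarantees.
  bitmap.reset();
  MarkGrey(2);
  assert(ColorOf(0) == "00");
  return 0;
}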
46 // ============================================================================= | 46 |
47 // Verifiers | 47 // ------------------------------------------------------------------------- |
48 // ============================================================================= | 48 // MarkCompactCollector |
| 49 |
| 50 MarkCompactCollector::MarkCompactCollector(Heap* heap) |
| 51 : // NOLINT |
| 52 heap_(heap), |
| 53 page_parallel_job_semaphore_(0), |
| 54 #ifdef DEBUG |
| 55 state_(IDLE), |
| 56 #endif |
| 57 was_marked_incrementally_(false), |
| 58 evacuation_(false), |
| 59 compacting_(false), |
| 60 black_allocation_(false), |
| 61 have_code_to_deoptimize_(false), |
| 62 marking_deque_(heap), |
| 63 code_flusher_(nullptr), |
| 64 sweeper_(heap) { |
| 65 } |
49 | 66 |
50 #ifdef VERIFY_HEAP | 67 #ifdef VERIFY_HEAP |
51 namespace { | |
52 | |
53 class MarkingVerifier : public ObjectVisitor { | 68 class MarkingVerifier : public ObjectVisitor { |
54 public: | 69 public: |
55 virtual void Run() = 0; | 70 virtual void Run() = 0; |
56 | 71 |
57 protected: | 72 protected: |
58 explicit MarkingVerifier(Heap* heap) : heap_(heap) {} | 73 explicit MarkingVerifier(Heap* heap) : heap_(heap) {} |
59 | 74 |
60 virtual MarkingState marking_state(MemoryChunk* chunk) = 0; | 75 virtual MarkingState marking_state(MemoryChunk* chunk) = 0; |
61 | 76 |
62 void VerifyRoots(VisitMode mode); | 77 void VerifyRoots(VisitMode mode); |
(...skipping 135 matching lines...) |
198 for (Object** current = start; current < end; current++) { | 213 for (Object** current = start; current < end; current++) { |
199 if ((*current)->IsHeapObject()) { | 214 if ((*current)->IsHeapObject()) { |
200 HeapObject* object = HeapObject::cast(*current); | 215 HeapObject* object = HeapObject::cast(*current); |
201 if (!heap_->InNewSpace(object)) return; | 216 if (!heap_->InNewSpace(object)) return; |
202 CHECK(ObjectMarking::IsBlackOrGrey(object, marking_state(object))); | 217 CHECK(ObjectMarking::IsBlackOrGrey(object, marking_state(object))); |
203 } | 218 } |
204 } | 219 } |
205 } | 220 } |
206 }; | 221 }; |
207 | 222 |
208 class EvacuationVerifier : public ObjectVisitor { | 223 class VerifyEvacuationVisitor : public ObjectVisitor { |
209 public: | 224 public: |
210 virtual void Run() = 0; | |
211 | |
212 void VisitPointers(Object** start, Object** end) override { | 225 void VisitPointers(Object** start, Object** end) override { |
213 for (Object** current = start; current < end; current++) { | 226 for (Object** current = start; current < end; current++) { |
214 if ((*current)->IsHeapObject()) { | 227 if ((*current)->IsHeapObject()) { |
215 HeapObject* object = HeapObject::cast(*current); | 228 HeapObject* object = HeapObject::cast(*current); |
216 CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(object)); | 229 CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(object)); |
217 } | 230 } |
218 } | 231 } |
219 } | 232 } |
220 | |
221 protected: | |
222 explicit EvacuationVerifier(Heap* heap) : heap_(heap) {} | |
223 | |
224 void VerifyRoots(VisitMode mode); | |
225 void VerifyEvacuationOnPage(Address start, Address end); | |
226 void VerifyEvacuation(NewSpace* new_space); | |
227 void VerifyEvacuation(PagedSpace* paged_space); | |
228 | |
229 Heap* heap_; | |
230 }; | 233 }; |
231 | 234 |
232 void EvacuationVerifier::VerifyRoots(VisitMode mode) { | |
233 heap_->IterateStrongRoots(this, mode); | |
234 } | |
235 | 235 |
236 void EvacuationVerifier::VerifyEvacuationOnPage(Address start, Address end) { | 236 static void VerifyEvacuation(Page* page) { |
237 Address current = start; | 237 VerifyEvacuationVisitor visitor; |
238 while (current < end) { | 238 HeapObjectIterator iterator(page); |
239 HeapObject* object = HeapObject::FromAddress(current); | 239 for (HeapObject* heap_object = iterator.Next(); heap_object != NULL; |
240 if (!object->IsFiller()) object->Iterate(this); | 240 heap_object = iterator.Next()) { |
241 current += object->Size(); | 241 // We skip free space objects. |
| 242 if (!heap_object->IsFiller()) { |
| 243 heap_object->Iterate(&visitor); |
| 244 } |
242 } | 245 } |
243 } | 246 } |
244 | 247 |
245 void EvacuationVerifier::VerifyEvacuation(NewSpace* space) { | 248 |
| 249 static void VerifyEvacuation(NewSpace* space) { |
| 250 VerifyEvacuationVisitor visitor; |
246 PageRange range(space->bottom(), space->top()); | 251 PageRange range(space->bottom(), space->top()); |
247 for (auto it = range.begin(); it != range.end();) { | 252 for (auto it = range.begin(); it != range.end();) { |
248 Page* page = *(it++); | 253 Page* page = *(it++); |
249 Address current = page->area_start(); | 254 Address current = page->area_start(); |
250 Address limit = it != range.end() ? page->area_end() : space->top(); | 255 Address limit = it != range.end() ? page->area_end() : space->top(); |
251 CHECK(limit == space->top() || !page->Contains(space->top())); | 256 CHECK(limit == space->top() || !page->Contains(space->top())); |
252 VerifyEvacuationOnPage(current, limit); | 257 while (current < limit) { |
| 258 HeapObject* object = HeapObject::FromAddress(current); |
| 259 object->Iterate(&visitor); |
| 260 current += object->Size(); |
| 261 } |
253 } | 262 } |
254 } | 263 } |
255 | 264 |
256 void EvacuationVerifier::VerifyEvacuation(PagedSpace* space) { | 265 |
257 if (FLAG_use_allocation_folding && (space == heap_->old_space())) { | 266 static void VerifyEvacuation(Heap* heap, PagedSpace* space) { |
| 267 if (FLAG_use_allocation_folding && (space == heap->old_space())) { |
258 return; | 268 return; |
259 } | 269 } |
260 for (Page* p : *space) { | 270 for (Page* p : *space) { |
261 if (p->IsEvacuationCandidate()) continue; | 271 if (p->IsEvacuationCandidate()) continue; |
262 VerifyEvacuationOnPage(p->area_start(), p->area_end()); | 272 VerifyEvacuation(p); |
263 } | 273 } |
264 } | 274 } |
265 | 275 |
266 class FullEvacuationVerifier : public EvacuationVerifier { | |
267 public: | |
268 explicit FullEvacuationVerifier(Heap* heap) : EvacuationVerifier(heap) {} | |
269 | 276 |
270 void Run() override { | 277 static void VerifyEvacuation(Heap* heap) { |
271 VerifyRoots(VISIT_ALL); | 278 VerifyEvacuation(heap, heap->old_space()); |
272 VerifyEvacuation(heap_->new_space()); | 279 VerifyEvacuation(heap, heap->code_space()); |
273 VerifyEvacuation(heap_->old_space()); | 280 VerifyEvacuation(heap, heap->map_space()); |
274 VerifyEvacuation(heap_->code_space()); | 281 VerifyEvacuation(heap->new_space()); |
275 VerifyEvacuation(heap_->map_space()); | |
276 } | |
277 }; | |
278 | 282 |
279 } // namespace | 283 VerifyEvacuationVisitor visitor; |
| 284 heap->IterateStrongRoots(&visitor, VISIT_ALL); |
| 285 } |
280 #endif // VERIFY_HEAP | 286 #endif // VERIFY_HEAP |
281 | 287 |
282 // ============================================================================= | |
283 // MarkCompactCollector | |
284 // ============================================================================= | |
285 | |
286 MarkCompactCollector::MarkCompactCollector(Heap* heap) | |
287 : // NOLINT | |
288 heap_(heap), | |
289 page_parallel_job_semaphore_(0), | |
290 #ifdef DEBUG | |
291 state_(IDLE), | |
292 #endif | |
293 was_marked_incrementally_(false), | |
294 evacuation_(false), | |
295 compacting_(false), | |
296 black_allocation_(false), | |
297 have_code_to_deoptimize_(false), | |
298 marking_deque_(heap), | |
299 code_flusher_(nullptr), | |
300 sweeper_(heap) { | |
301 } | |
302 | 288 |
303 void MarkCompactCollector::SetUp() { | 289 void MarkCompactCollector::SetUp() { |
304 DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0); | 290 DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0); |
305 DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0); | 291 DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0); |
306 DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0); | 292 DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0); |
307 DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0); | 293 DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0); |
308 marking_deque()->SetUp(); | 294 marking_deque()->SetUp(); |
309 | 295 |
310 if (FLAG_flush_code) { | 296 if (FLAG_flush_code) { |
311 code_flusher_ = new CodeFlusher(isolate()); | 297 code_flusher_ = new CodeFlusher(isolate()); |
(...skipping 281 matching lines...) |
593 void MarkCompactCollector::EnsureSweepingCompleted() { | 579 void MarkCompactCollector::EnsureSweepingCompleted() { |
594 if (!sweeper().sweeping_in_progress()) return; | 580 if (!sweeper().sweeping_in_progress()) return; |
595 | 581 |
596 sweeper().EnsureCompleted(); | 582 sweeper().EnsureCompleted(); |
597 heap()->old_space()->RefillFreeList(); | 583 heap()->old_space()->RefillFreeList(); |
598 heap()->code_space()->RefillFreeList(); | 584 heap()->code_space()->RefillFreeList(); |
599 heap()->map_space()->RefillFreeList(); | 585 heap()->map_space()->RefillFreeList(); |
600 | 586 |
601 #ifdef VERIFY_HEAP | 587 #ifdef VERIFY_HEAP |
602 if (FLAG_verify_heap && !evacuation()) { | 588 if (FLAG_verify_heap && !evacuation()) { |
603 FullEvacuationVerifier verifier(heap_); | 589 VerifyEvacuation(heap_); |
604 verifier.Run(); | |
605 } | 590 } |
606 #endif | 591 #endif |
607 | 592 |
608 if (heap()->memory_allocator()->unmapper()->has_delayed_chunks()) | 593 if (heap()->memory_allocator()->unmapper()->has_delayed_chunks()) |
609 heap()->memory_allocator()->unmapper()->FreeQueuedChunks(); | 594 heap()->memory_allocator()->unmapper()->FreeQueuedChunks(); |
610 } | 595 } |
611 | 596 |
612 bool MarkCompactCollector::Sweeper::AreSweeperTasksRunning() { | 597 bool MarkCompactCollector::Sweeper::AreSweeperTasksRunning() { |
613 return num_sweeping_tasks_.Value() != 0; | 598 return num_sweeping_tasks_.Value() != 0; |
614 } | 599 } |
(...skipping 3104 matching lines...) |
3719 } | 3704 } |
3720 } | 3705 } |
3721 | 3706 |
3722 { | 3707 { |
3723 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_EVACUATE_EPILOGUE); | 3708 TRACE_GC(heap()->tracer(), GCTracer::Scope::MC_EVACUATE_EPILOGUE); |
3724 EvacuateEpilogue(); | 3709 EvacuateEpilogue(); |
3725 } | 3710 } |
3726 | 3711 |
3727 #ifdef VERIFY_HEAP | 3712 #ifdef VERIFY_HEAP |
3728 if (FLAG_verify_heap && !sweeper().sweeping_in_progress()) { | 3713 if (FLAG_verify_heap && !sweeper().sweeping_in_progress()) { |
3729 FullEvacuationVerifier verifier(heap()); | 3714 VerifyEvacuation(heap()); |
3730 verifier.Run(); | |
3731 } | 3715 } |
3732 #endif | 3716 #endif |
3733 } | 3717 } |
3734 | 3718 |
3735 template <RememberedSetType type> | 3719 template <RememberedSetType type> |
3736 class PointerUpdateJobTraits { | 3720 class PointerUpdateJobTraits { |
3737 public: | 3721 public: |
3738 typedef int PerPageData; // Per page data is not used in this job. | 3722 typedef int PerPageData; // Per page data is not used in this job. |
3739 typedef int PerTaskData; // Per task data is not used in this job. | 3723 typedef int PerTaskData; // Per task data is not used in this job. |
3740 | 3724 |
(...skipping 427 matching lines...) |
4168 // The target is always in old space, we don't have to record the slot in | 4152 // The target is always in old space, we don't have to record the slot in |
4169 // the old-to-new remembered set. | 4153 // the old-to-new remembered set. |
4170 DCHECK(!heap()->InNewSpace(target)); | 4154 DCHECK(!heap()->InNewSpace(target)); |
4171 RecordRelocSlot(host, &rinfo, target); | 4155 RecordRelocSlot(host, &rinfo, target); |
4172 } | 4156 } |
4173 } | 4157 } |
4174 } | 4158 } |
4175 | 4159 |
4176 } // namespace internal | 4160 } // namespace internal |
4177 } // namespace v8 | 4161 } // namespace v8 |