Index: src/mark-compact.cc
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index 5f22565fb9268a21bb5e548c9a74c8ed77067d82..18894d2a88c9f0ee0442ed9dcbe39d32e3c5f9ca 100644
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -543,7 +543,6 @@ void MarkCompactCollector::WaitUntilSweepingCompleted() {
     sweeping_pending_ = false;
     StealMemoryFromSweeperThreads(heap()->paged_space(OLD_DATA_SPACE));
     StealMemoryFromSweeperThreads(heap()->paged_space(OLD_POINTER_SPACE));
-    heap()->FreeQueuedChunks();
   }
 }
 
@@ -568,6 +567,13 @@ bool MarkCompactCollector::IsConcurrentSweepingInProgress() {
 }
 
 
+void MarkCompactCollector::FinalizeSweeping() {
+  ASSERT(sweeping_pending_ == false);
+  ReleaseEvacuationCandidates();
+  heap()->FreeQueuedChunks();
+}
+
+
 void MarkCompactCollector::MarkInParallel() {
   for (int i = 0; i < FLAG_marking_threads; i++) {
     heap()->isolate()->marking_threads()[i]->StartMarking();
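
The new FinalizeSweeping() collects the cleanup that must not run while
sweeper threads may still be touching old-space pages: releasing evacuation
candidates and freeing queued chunks (the call removed from
WaitUntilSweepingCompleted() in the first hunk). Later hunks pick the call
site: Prepare() of the next GC once a concurrent sweep has been waited on, or
the end of SweepSpaces() for the non-concurrent modes. Below is a minimal
standalone sketch of the ordering invariant the ASSERT encodes; it is a toy
model, not V8 code, and the member names are only borrowed from the diff:

    #include <cassert>

    // Toy stand-in for the collector: finalization may only run once no
    // sweep is pending, which is what ASSERT(sweeping_pending_ == false)
    // checks in the real FinalizeSweeping().
    struct CollectorModel {
      bool sweeping_pending = false;
      bool chunks_queued = false;

      void WaitUntilSweepingCompleted() { sweeping_pending = false; }

      void FinalizeSweeping() {
        assert(!sweeping_pending);  // never finalize under a running sweep
        chunks_queued = false;      // stands in for heap()->FreeQueuedChunks()
      }
    };

    int main() {
      CollectorModel c;
      c.sweeping_pending = true;  // concurrent sweep still in flight
      c.chunks_queued = true;
      c.WaitUntilSweepingCompleted();
      c.FinalizeSweeping();       // safe only after the wait
      return 0;
    }
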
@@ -887,6 +893,7 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) {
   if (AreSweeperThreadsActivated() && FLAG_concurrent_sweeping) {
     // Instead of waiting we could also abort the sweeper threads here.
     WaitUntilSweepingCompleted();
+    FinalizeSweeping();
   }
 
   // Clear marking bits if incremental marking is aborted.
@@ -3280,6 +3287,11 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
 
   slots_buffer_allocator_.DeallocateChain(&migration_slots_buffer_);
   ASSERT(migration_slots_buffer_ == NULL);
+}
+
+
+void MarkCompactCollector::ReleaseEvacuationCandidates() {
+  int npages = evacuation_candidates_.length();
   for (int i = 0; i < npages; i++) {
     Page* p = evacuation_candidates_[i];
     if (!p->IsEvacuationCandidate()) continue;
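
This hunk closes EvacuateNewSpaceAndCandidates() early and turns its tail, the
loop that hands evacuated pages back to their owning spaces, into the separate
ReleaseEvacuationCandidates() that FinalizeSweeping() calls above. Splitting it
out is what lets the release be deferred to FinalizeSweeping(), which only runs
once sweeping has completed. Below is a rough standalone model of that
deferral; the types and names are invented for illustration and are not the
V8 API:

    #include <vector>

    struct PageModel { bool released = false; };

    struct CollectorModel {
      std::vector<PageModel*> evacuation_candidates;
      bool sweeping_pending = false;

      // Analogue of ReleaseEvacuationCandidates(): the caller (FinalizeSweeping
      // in the patch) must ensure no sweep is pending before the evacuated
      // pages are given back to their spaces.
      void ReleaseEvacuationCandidates() {
        if (sweeping_pending) return;  // deferred until finalization
        for (PageModel* p : evacuation_candidates) p->released = true;
        evacuation_candidates.clear();
      }
    };

    int main() {
      PageModel page;
      CollectorModel c;
      c.evacuation_candidates.push_back(&page);
      c.sweeping_pending = false;       // sweep already finished
      c.ReleaseEvacuationCandidates();  // pages go back to their spaces
      return page.released ? 0 : 1;
    }
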
@@ -3724,7 +3736,8 @@ void MarkCompactCollector::SweepInParallel(PagedSpace* space,
 void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) {
   space->set_was_swept_conservatively(sweeper == CONSERVATIVE ||
                                       sweeper == LAZY_CONSERVATIVE ||
-                                      sweeper == PARALLEL_CONSERVATIVE);
+                                      sweeper == PARALLEL_CONSERVATIVE ||
+                                      sweeper == CONCURRENT_CONSERVATIVE);
   space->ClearStats();
 
   PageIterator it(space);
@@ -3798,6 +3811,7 @@ void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) {
         lazy_sweeping_active = true;
         break;
       }
+      case CONCURRENT_CONSERVATIVE:
       case PARALLEL_CONSERVATIVE: {
         if (FLAG_gc_verbose) {
           PrintF("Sweeping 0x%" V8PRIxPTR " conservatively in parallel.\n",
@@ -3843,7 +3857,8 @@ void MarkCompactCollector::SweepSpaces() {
 #endif
   SweeperType how_to_sweep =
       FLAG_lazy_sweeping ? LAZY_CONSERVATIVE : CONSERVATIVE;
-  if (AreSweeperThreadsActivated()) how_to_sweep = PARALLEL_CONSERVATIVE;
+  if (FLAG_parallel_sweeping) how_to_sweep = PARALLEL_CONSERVATIVE;
+  if (FLAG_concurrent_sweeping) how_to_sweep = CONCURRENT_CONSERVATIVE;
   if (FLAG_expose_gc) how_to_sweep = CONSERVATIVE;
   if (sweep_precisely_) how_to_sweep = PRECISE;
   // Noncompacting collections simply sweep the spaces to clear the mark
@@ -3855,12 +3870,14 @@ void MarkCompactCollector::SweepSpaces() {
   SweepSpace(heap()->old_pointer_space(), how_to_sweep);
   SweepSpace(heap()->old_data_space(), how_to_sweep);
 
-  if (how_to_sweep == PARALLEL_CONSERVATIVE) {
+  if (how_to_sweep == PARALLEL_CONSERVATIVE ||
+      how_to_sweep == CONCURRENT_CONSERVATIVE) {
     // TODO(hpayer): fix race with concurrent sweeper
     StartSweeperThreads();
-    if (FLAG_parallel_sweeping && !FLAG_concurrent_sweeping) {
-      WaitUntilSweepingCompleted();
-    }
+  }
+
+  if (how_to_sweep == PARALLEL_CONSERVATIVE) {
+    WaitUntilSweepingCompleted();
   }
 
   RemoveDeadInvalidatedCode();
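
The mode selection in SweepSpaces() is a ladder of overrides: lazy or plain
conservative as the default, then FLAG_parallel_sweeping, then
FLAG_concurrent_sweeping (so the concurrent flag wins when both are set), with
--expose-gc and precise sweeping overriding everything. Both the parallel and
the concurrent mode start the sweeper threads, but only PARALLEL_CONSERVATIVE
blocks in WaitUntilSweepingCompleted(); in the concurrent mode the threads keep
running past the GC pause. A standalone model of the selection, with the flags
reduced to plain bools (in V8 they are command-line flags):

    enum SweeperType {
      CONSERVATIVE,
      LAZY_CONSERVATIVE,
      PARALLEL_CONSERVATIVE,
      CONCURRENT_CONSERVATIVE,
      PRECISE
    };

    // Later assignments win, mirroring the order of the if-statements above.
    SweeperType ChooseSweeper(bool lazy, bool parallel, bool concurrent,
                              bool expose_gc, bool sweep_precisely) {
      SweeperType how_to_sweep = lazy ? LAZY_CONSERVATIVE : CONSERVATIVE;
      if (parallel) how_to_sweep = PARALLEL_CONSERVATIVE;
      if (concurrent) how_to_sweep = CONCURRENT_CONSERVATIVE;
      if (expose_gc) how_to_sweep = CONSERVATIVE;
      if (sweep_precisely) how_to_sweep = PRECISE;
      return how_to_sweep;
    }

    int main() {
      // With both sweeping flags on, the concurrent mode is chosen.
      return ChooseSweeper(true, true, true, false, false) ==
                     CONCURRENT_CONSERVATIVE
                 ? 0
                 : 1;
    }
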
@@ -3877,6 +3894,10 @@ void MarkCompactCollector::SweepSpaces() {
 
   // Deallocate unmarked objects and clear marked bits for marked objects.
   heap_->lo_space()->FreeUnmarkedObjects();
+
+  if (how_to_sweep != CONCURRENT_CONSERVATIVE) {
+    FinalizeSweeping();
+  }
 }
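
Taken together, the hunks move finalization to one of two places: for the
non-concurrent modes SweepSpaces() still waits (if needed) and then calls
FinalizeSweeping() here at the end, while for CONCURRENT_CONSERVATIVE the
sweeper threads outlive the pause and, in the hunks shown here, the next GC's
Prepare() waits for them and finalizes before it starts. A compact standalone
model of that lifecycle follows; the structure is simplified and the names are
only borrowed from the diff:

    // Toy model: one flag per concern, no real heap.
    struct GcModel {
      bool concurrent = false;
      bool sweeping_pending = false;
      bool finalized = true;

      void WaitUntilSweepingCompleted() { sweeping_pending = false; }
      void FinalizeSweeping() { finalized = true; }

      // Start of a GC cycle: finish and finalize a leftover concurrent sweep.
      void Prepare() {
        if (sweeping_pending) {
          WaitUntilSweepingCompleted();
          FinalizeSweeping();
        }
      }

      // End of a GC cycle: start sweeping; block and finalize unless the
      // sweep is concurrent, in which case finalization is deferred.
      void SweepSpaces() {
        sweeping_pending = true;
        finalized = false;
        if (!concurrent) {
          WaitUntilSweepingCompleted();
          FinalizeSweeping();
        }
      }
    };

    int main() {
      GcModel gc;
      gc.concurrent = true;
      gc.SweepSpaces();  // GC #1 ends with the sweep still running
      gc.Prepare();      // GC #2 waits for it and finalizes it
      return gc.finalized ? 0 : 1;
    }
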