Index: src/heap/mark-compact.cc
diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc
index 73efe887d799009c37045513b6f83778998545db..58377dab0f664682444824bdd59be84ac5adc5ca 100644
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -45,7 +45,6 @@ MarkCompactCollector::MarkCompactCollector(Heap* heap)
       finalize_incremental_marking_(false),
       marking_parity_(ODD_MARKING_PARITY),
      compacting_(false),
-      was_marked_incrementally_(false),
       sweeping_in_progress_(false),
       pending_sweeper_jobs_semaphore_(0),
       evacuation_(false),
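Note: this hunk only removes the initializer; the flag itself becomes a local variable in Prepare() in the hunks below. The corresponding declaration presumably lives in mark-compact.h, which is not part of this file's diff. A rough sketch of the assumed header-side change, with names other than was_marked_incrementally_ taken from the initializer list above:

    // Sketch only, not the real header: assumed shape of the matching
    // mark-compact.h change. Neighbouring fields are copied from the
    // initializer list in the hunk above; everything else is guessed.
    class MarkCompactCollectorSketch {
     public:
      bool finalize_incremental_marking_ = false;
      bool compacting_ = false;
      // bool was_marked_incrementally_ = false;  // removed: Prepare() now
      //                                          // keeps this in a local.
      bool sweeping_in_progress_ = false;
    };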
@@ -338,11 +337,10 @@ void MarkCompactCollector::CollectGarbage() {
   // Make sure that Prepare() has been called. The individual steps below will
   // update the state as they proceed.
   DCHECK(state_ == PREPARE_GC);
+  DCHECK(heap_->incremental_marking()->IsStopped());
   MarkLiveObjects();
-  DCHECK(heap_->incremental_marking()->IsStopped());
-
   // ClearNonLiveReferences can deoptimize code in dependent code arrays.
   // Process weak cells before so that weak cells in dependent code
   // arrays are cleared or contain only live code objects.
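Moving the IsStopped() check ahead of MarkLiveObjects() reflects the new contract established by this patch: Prepare() now finalizes or stops incremental marking itself, so the invariant already holds when CollectGarbage() reaches the marking phase, rather than only afterwards. Condensed view of the resulting order, taken from the hunk above:

    // Condensed from the hunk above; surrounding code elided.
    DCHECK(state_ == PREPARE_GC);
    DCHECK(heap_->incremental_marking()->IsStopped());  // now guaranteed by Prepare()
    MarkLiveObjects();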
@@ -787,7 +785,7 @@ void MarkCompactCollector::AbortCompaction() {
 void MarkCompactCollector::Prepare() {
-  was_marked_incrementally_ = heap()->incremental_marking()->IsMarking();
+  bool was_marked_incrementally = heap()->incremental_marking()->IsMarking();
 #ifdef DEBUG
   DCHECK(state_ == IDLE);
@@ -802,18 +800,28 @@ void MarkCompactCollector::Prepare() {
   }
   // Clear marking bits if incremental marking is aborted.
-  if (was_marked_incrementally_ && abort_incremental_marking_) {
+  if (was_marked_incrementally && abort_incremental_marking_) {
     heap()->incremental_marking()->Stop();
     ClearMarkbits();
     AbortWeakCollections();
     AbortWeakCells();
     AbortCompaction();
-    was_marked_incrementally_ = false;
+    was_marked_incrementally = false;
+  }
+
+  if (was_marked_incrementally) {
+    heap()->incremental_marking()->Finalize();
+  } else {
+    // Abort any pending incremental activities e.g. incremental sweeping.
+    heap()->incremental_marking()->Stop();

Hannes Payer (out of office), 2015/08/07 10:53:38:
in that case, stop was already performed before.

+    if (marking_deque_.in_use()) {
+      marking_deque_.Uninitialize(true);

Hannes Payer (out of office), 2015/08/07 10:53:38:
Why are we doing this just in this case?

+    }
   }
   // Don't start compaction if we are in the middle of incremental
   // marking cycle. We did not collect any slots.
-  if (!FLAG_never_compact && !was_marked_incrementally_) {
+  if (!FLAG_never_compact && !was_marked_incrementally) {
     StartCompaction(NON_INCREMENTAL_COMPACTION);
   }
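The net effect of the two Prepare() hunks above is that Prepare() now owns the whole decision of what to do with an in-progress incremental marking cycle; the same branch used to sit in MarkLiveObjects() and is deleted further down. The following is a small, self-contained toy model, not V8 code, that restates only the branch structure visible in the diff; the function name, the enum and main() are invented for illustration.

    // Toy model of the decision Prepare() makes after this patch.
    #include <cstdio>

    enum class Action { kFinalize, kStopAndReleaseDeque };

    Action PrepareIncrementalMarking(bool is_marking, bool abort_incremental_marking) {
      bool was_marked_incrementally = is_marking;
      if (was_marked_incrementally && abort_incremental_marking) {
        // Abort path: in the real code this is where Stop(), ClearMarkbits(),
        // AbortWeakCollections(), AbortWeakCells() and AbortCompaction() run.
        was_marked_incrementally = false;
      }
      // A surviving incremental cycle is finalized; otherwise any pending
      // incremental activity is stopped and the marking deque is released.
      return was_marked_incrementally ? Action::kFinalize
                                      : Action::kStopAndReleaseDeque;
    }

    int main() {
      // Marking in progress and not aborted: finalize before full marking.
      std::printf("%d\n", PrepareIncrementalMarking(true, false) == Action::kFinalize);
      // No incremental marking, or an aborted one: stop instead.
      std::printf("%d\n",
                  PrepareIncrementalMarking(false, false) == Action::kStopAndReleaseDeque);
      std::printf("%d\n",
                  PrepareIncrementalMarking(true, true) == Action::kStopAndReleaseDeque);
    }

In both the aborted and the never-marked case was_marked_incrementally ends up false, which is also the condition under which StartCompaction(NON_INCREMENTAL_COMPACTION) runs, matching the comment about not having collected any slots.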
@@ -824,7 +832,7 @@ void MarkCompactCollector::Prepare() {
   }
 #ifdef VERIFY_HEAP
-  if (!was_marked_incrementally_ && FLAG_verify_heap) {
+  if (!was_marked_incrementally && FLAG_verify_heap) {
     VerifyMarkbitsAreClean();
   }
 #endif
@@ -2237,17 +2245,6 @@ void MarkCompactCollector::MarkLiveObjects() {
   // with the C stack limit check.
   PostponeInterruptsScope postpone(isolate());
-  IncrementalMarking* incremental_marking = heap_->incremental_marking();
-  if (was_marked_incrementally_) {
-    incremental_marking->Finalize();
-  } else {
-    // Abort any pending incremental activities e.g. incremental sweeping.
-    incremental_marking->Stop();
-    if (marking_deque_.in_use()) {
-      marking_deque_.Uninitialize(true);
-    }
-  }
-
 #ifdef DEBUG
   DCHECK(state_ == PREPARE_GC);
   state_ = MARK_LIVE_OBJECTS;
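With that branch moved into Prepare(), MarkLiveObjects() keeps only the interrupt postponement and the debug state transition at its top; condensed from the context lines above:

    // MarkLiveObjects() preamble after this patch (condensed from the context
    // lines above; the removed branch now lives in Prepare()).
    PostponeInterruptsScope postpone(isolate());
    #ifdef DEBUG
      DCHECK(state_ == PREPARE_GC);
      state_ = MARK_LIVE_OBJECTS;
    #endif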
@@ -3691,7 +3688,7 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
            SlotsBuffer::SizeOfChain(migration_slots_buffer_));
   }
-  if (compacting_ && was_marked_incrementally_) {
+  if (compacting_ && heap()->incremental_marking()->IsMarking()) {
     GCTracer::Scope gc_scope(heap()->tracer(),
                              GCTracer::Scope::MC_RESCAN_LARGE_OBJECTS);
     // It's difficult to filter out slots recorded for large objects.
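The last hunk replaces the stored snapshot with a direct query of the incremental marker when deciding whether slots recorded for large objects need to be rescanned. Whether IsMarking() still reports the same thing at this point of the collection as the flag Prepare() used to capture depends on IncrementalMarking's state transitions, which this excerpt does not show. Condensed before/after of the predicate:

    // Condensed from the hunk above.
    // Before: a snapshot captured in Prepare() and stored in a member:
    //   if (compacting_ && was_marked_incrementally_) { ... }
    // After: the incremental marker is queried directly at evacuation time:
    if (compacting_ && heap()->incremental_marking()->IsMarking()) {
      // Rescan slots recorded for large objects (context lines above).
    }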