Chromium Code Reviews

Index: src/heap/mark-compact.cc
diff --git a/src/heap/mark-compact.cc b/src/heap/mark-compact.cc
index 73efe887d799009c37045513b6f83778998545db..7c585522803126e23d7e9f6cd990790c9a85195a 100644
--- a/src/heap/mark-compact.cc
+++ b/src/heap/mark-compact.cc
@@ -45,7 +45,6 @@ MarkCompactCollector::MarkCompactCollector(Heap* heap)
       finalize_incremental_marking_(false),
       marking_parity_(ODD_MARKING_PARITY),
       compacting_(false),
-      was_marked_incrementally_(false),
       sweeping_in_progress_(false),
       pending_sweeper_jobs_semaphore_(0),
       evacuation_(false),
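For readers outside V8, the shape of this change is the usual "member flag that only matters inside one phase becomes a local variable" refactoring: the collector object no longer carries state that can go stale between GC cycles. A minimal self-contained illustration of the pattern (toy names only, none of this is V8 API):

#include <cstdio>

// Toy stand-in for the collector; names are illustrative only.
class ToyCollector {
 public:
  // Before this style of change, a bool member would remember whether
  // marking was active and would have to be reset at the right times.
  // After it, the phase queries the answer when it runs and keeps it on
  // the stack, so no reset bookkeeping is needed.
  void Prepare(bool marking_active) {
    bool was_marking = marking_active;  // local, cannot outlive the call
    if (was_marking) {
      std::printf("abort incremental work, clear mark bits\n");
      was_marking = false;
    }
    if (!was_marking) {
      std::printf("start non-incremental compaction\n");
    }
  }
};

int main() {
  ToyCollector collector;
  collector.Prepare(/*marking_active=*/true);
  collector.Prepare(/*marking_active=*/false);
  return 0;
}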
@@ -787,7 +786,7 @@ void MarkCompactCollector::AbortCompaction() {
 void MarkCompactCollector::Prepare() {
-  was_marked_incrementally_ = heap()->incremental_marking()->IsMarking();
+  bool was_marked_incrementally = heap()->incremental_marking()->IsMarking();
 #ifdef DEBUG
   DCHECK(state_ == IDLE);
@@ -802,18 +801,18 @@ void MarkCompactCollector::Prepare() {
   }
   // Clear marking bits if incremental marking is aborted.
-  if (was_marked_incrementally_ && abort_incremental_marking_) {
+  if (was_marked_incrementally && abort_incremental_marking_) {
     heap()->incremental_marking()->Stop();
     ClearMarkbits();
     AbortWeakCollections();
     AbortWeakCells();
     AbortCompaction();
-    was_marked_incrementally_ = false;
+    was_marked_incrementally = false;
   }
   // Don't start compaction if we are in the middle of incremental
   // marking cycle. We did not collect any slots.
-  if (!FLAG_never_compact && !was_marked_incrementally_) {
+  if (!FLAG_never_compact && !was_marked_incrementally) {
     StartCompaction(NON_INCREMENTAL_COMPACTION);
   }
@@ -824,7 +823,7 @@ void MarkCompactCollector::Prepare() {
   }
 #ifdef VERIFY_HEAP
-  if (!was_marked_incrementally_ && FLAG_verify_heap) {
+  if (!was_marked_incrementally && FLAG_verify_heap) {
     VerifyMarkbitsAreClean();
   }
 #endif
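Taken together, the Prepare() hunks above turn the long-lived was_marked_incrementally_ member into a flag that lives only for the duration of the call. A condensed sketch of how the function reads after the patch, with the unchanged sweeping and space-iteration code elided (the elisions are assumptions about surrounding context, not part of this diff):

// Sketch only: condensed from the hunks above; elided parts are assumptions.
void MarkCompactCollector::Prepare() {
  // The flag is now computed locally and cannot leak stale state into a
  // later GC cycle.
  bool was_marked_incrementally = heap()->incremental_marking()->IsMarking();

  // Clear marking bits if incremental marking is aborted.
  if (was_marked_incrementally && abort_incremental_marking_) {
    heap()->incremental_marking()->Stop();
    ClearMarkbits();
    AbortWeakCollections();
    AbortWeakCells();
    AbortCompaction();
    was_marked_incrementally = false;
  }

  // Don't start compaction if we are in the middle of incremental
  // marking cycle. We did not collect any slots.
  if (!FLAG_never_compact && !was_marked_incrementally) {
    StartCompaction(NON_INCREMENTAL_COMPACTION);
  }

  // ... sweeping completion and space preparation unchanged ...

#ifdef VERIFY_HEAP
  if (!was_marked_incrementally && FLAG_verify_heap) {
    VerifyMarkbitsAreClean();
  }
#endif
}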
@@ -2238,16 +2237,16 @@ void MarkCompactCollector::MarkLiveObjects() {
   PostponeInterruptsScope postpone(isolate());
   IncrementalMarking* incremental_marking = heap_->incremental_marking();

Hannes Payer (out of office), 2015/08/07 09:57:31:
It would be nicer to move the chunk of code that d…

Michael Starzinger, 2015/08/07 10:01:54:
Acknowledged. Would you be fine with doing that in…

Hannes Payer (out of office), 2015/08/07 10:39:23:
let's just do it in this one.

-  if (was_marked_incrementally_) {
+  if (incremental_marking->IsMarking()) {
     incremental_marking->Finalize();
   } else {
-    // Abort any pending incremental activities e.g. incremental sweeping.
-    incremental_marking->Stop();

Hannes Payer (out of office), 2015/08/07 09:57:31:
Stop() is now not called for non-aborting GCs. We sh…

Michael Starzinger, 2015/08/07 10:01:54:
Done. Yep, my think-o, the try-jobs caught that.

     if (marking_deque_.in_use()) {
       marking_deque_.Uninitialize(true);
     }
   }
+  DCHECK(incremental_marking->IsStopped());
+
 #ifdef DEBUG
   DCHECK(state_ == PREPARE_GC);
   state_ = MARK_LIVE_OBJECTS;
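Following the exchange above, a later patch set presumably restores the Stop() call for the non-marking path so that pending incremental activities are still aborted and the new DCHECK holds. That follow-up is not part of this excerpt, so the sketch below is an assumption about the resolved prologue rather than the landed code:

// Presumed resolution (assumption; the follow-up patch set is not shown).
IncrementalMarking* incremental_marking = heap_->incremental_marking();
if (incremental_marking->IsMarking()) {
  incremental_marking->Finalize();
} else {
  // Abort any pending incremental activities e.g. incremental sweeping.
  incremental_marking->Stop();
  if (marking_deque_.in_use()) {
    marking_deque_.Uninitialize(true);
  }
}
// Both paths must leave the incremental marker stopped.
DCHECK(incremental_marking->IsStopped());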
@@ -3691,7 +3690,7 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
            SlotsBuffer::SizeOfChain(migration_slots_buffer_));
   }
-  if (compacting_ && was_marked_incrementally_) {
+  if (compacting_ && heap()->incremental_marking()->IsMarking()) {
     GCTracer::Scope gc_scope(heap()->tracer(),
                              GCTracer::Scope::MC_RESCAN_LARGE_OBJECTS);
     // It's difficult to filter out slots recorded for large objects.