Index: src/incremental-marking.cc |
diff --git a/src/incremental-marking.cc b/src/incremental-marking.cc |
index 8e2eb62357664d39f378530af1408e0f6202b9b9..11a578ddc2b7f93965c18d4b1adb20c836c7bd4f 100644 |
--- a/src/incremental-marking.cc |
+++ b/src/incremental-marking.cc |
@@ -536,7 +536,7 @@ void IncrementalMarking::Start(CompactionFlag flag) { |
ResetStepCounters(); |
- if (!heap_->mark_compact_collector()->IsConcurrentSweepingInProgress()) { |
+ if (!heap_->mark_compact_collector()->sweeping_pending()) { |
StartMarking(flag); |
} else { |
if (FLAG_trace_incremental_marking) { |
@@ -883,11 +883,11 @@ void IncrementalMarking::Step(intptr_t allocated_bytes, |
} |
if (state_ == SWEEPING) { |
- if (heap_->mark_compact_collector()->IsConcurrentSweepingInProgress() && |
+ if (heap_->mark_compact_collector()->sweeping_pending() && |
Jarin
2014/07/11 07:40:52
As discussed offline, perhaps we should only complete the sweeping here when it has already finished. [comment truncated in original]
Hannes Payer (out of office)
2014/07/11 09:36:13
On our benchmark it seems to be fine. I left a comment. [comment truncated in original]
|
heap_->mark_compact_collector()->IsSweepingCompleted()) { |
heap_->mark_compact_collector()->WaitUntilSweepingCompleted(); |
} |
- if (!heap_->mark_compact_collector()->IsConcurrentSweepingInProgress()) { |
+ if (!heap_->mark_compact_collector()->sweeping_pending()) { |
bytes_scanned_ = 0; |
StartMarking(PREVENT_COMPACTION); |
} |