Index: src/heap/incremental-marking.cc
diff --git a/src/heap/incremental-marking.cc b/src/heap/incremental-marking.cc
index f2ce5e155f5f80c1b92fa91724d6d11ac0ede04f..56cd688446ba92f3d568b60164bcc91bb612dade 100644
--- a/src/heap/incremental-marking.cc
+++ b/src/heap/incremental-marking.cc
@@ -459,7 +459,7 @@
 }
-void IncrementalMarking::Start() {
+void IncrementalMarking::Start(CompactionFlag flag) {
   if (FLAG_trace_incremental_marking) {
     PrintF("[IncrementalMarking] Start\n");
   }
@@ -474,7 +474,7 @@
   was_activated_ = true;
   if (!heap_->mark_compact_collector()->sweeping_in_progress()) {
-    StartMarking();
+    StartMarking(flag);
   } else {
     if (FLAG_trace_incremental_marking) {
       PrintF("[IncrementalMarking] Start sweeping.\n");
@@ -486,12 +486,12 @@
 }
-void IncrementalMarking::StartMarking() {
+void IncrementalMarking::StartMarking(CompactionFlag flag) {
   if (FLAG_trace_incremental_marking) {
     PrintF("[IncrementalMarking] Start marking\n");
   }
-  is_compacting_ = !FLAG_never_compact &&
+  is_compacting_ = !FLAG_never_compact && (flag == ALLOW_COMPACTION) &&
                    heap_->mark_compact_collector()->StartCompaction(
                        MarkCompactCollector::INCREMENTAL_COMPACTION);
@@ -823,7 +823,9 @@
 void IncrementalMarking::OldSpaceStep(intptr_t allocated) {
   if (IsStopped() && ShouldActivate()) {
-    Start();
+    // TODO(hpayer): Let's play safe for now, but compaction should be
+    // in principle possible.
+    Start(PREVENT_COMPACTION);
   } else {
     Step(allocated * kFastMarking / kInitialMarkingSpeed, GC_VIA_STACK_GUARD);
   }
@@ -952,7 +954,7 @@
       }
       if (!heap_->mark_compact_collector()->sweeping_in_progress()) {
         bytes_scanned_ = 0;
-        StartMarking();
+        StartMarking(PREVENT_COMPACTION);
       }
     } else if (state_ == MARKING) {
       bytes_processed = ProcessMarkingDeque(bytes_to_process);
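The header side of this change is not included in the hunks above. As a rough sketch only, the declarations in src/heap/incremental-marking.h that this diff relies on could look like the following; the CompactionFlag enum values match the ones used in the .cc changes, while the default argument of ALLOW_COMPACTION (so untouched Start() call sites keep their current compacting behavior) is an assumption, not something this diff shows.

// Sketch of the header side (src/heap/incremental-marking.h); not part of
// this diff, and the default argument below is an assumption.
class IncrementalMarking {
 public:
  enum CompactionFlag { ALLOW_COMPACTION, PREVENT_COMPACTION };

  // Assumed default keeps existing callers compacting as before; callers that
  // must not compact (e.g. OldSpaceStep above) pass PREVENT_COMPACTION.
  void Start(CompactionFlag flag = ALLOW_COMPACTION);

 private:
  // StartMarking() now takes the flag so it can gate is_compacting_ on it.
  void StartMarking(CompactionFlag flag);
};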