Index: src/heap/heap.cc
diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index 6e6da98d31e2842f202bd0c74132b77323c065b1..13e18d4f298bb58acf86d815bf75be1ee9b59498 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -134,6 +134,7 @@ Heap::Heap()
       ring_buffer_end_(0),
       promotion_queue_(this),
       configured_(false),
+      current_gc_flags_(Heap::kNoGCFlags),
       external_string_table_(this),
       chunks_queued_for_free_(NULL),
       gc_callbacks_depth_(0),
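
The matching heap.h change is not part of this file's diff. A minimal sketch of what the new field and its setter presumably look like, inferred from the call sites in the hunks below (names and the kNoGCFlags value are assumptions, not copied from the patch):

// Hypothetical heap.h counterpart (not shown in this patch): the flags
// for the collection currently being set up now live on Heap instead of
// on MarkCompactCollector.
class Heap {
 public:
  static const int kNoGCFlags = 0;  // assumed value

  void set_current_gc_flags(int flags) { current_gc_flags_ = flags; }

 private:
  int current_gc_flags_;  // initialized to kNoGCFlags in the constructor
};
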
@@ -798,9 +799,9 @@ void Heap::CollectAllGarbage(int flags, const char* gc_reason,
   // Since we are ignoring the return value, the exact choice of space does
   // not matter, so long as we do not specify NEW_SPACE, which would not
   // cause a full GC.
-  mark_compact_collector_.SetFlags(flags);
+  set_current_gc_flags(flags);
   CollectGarbage(OLD_SPACE, gc_reason, gc_callback_flags);
-  mark_compact_collector_.SetFlags(kNoGCFlags);
+  set_current_gc_flags(kNoGCFlags);
 }
 
 
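CollectAllGarbage now brackets the collection with set_current_gc_flags instead of reaching into the collector, and always resets to kNoGCFlags afterwards, so the flags apply to exactly one CollectGarbage call. The same set/collect/reset pattern could be captured with a scoped guard; a hypothetical illustration only, not part of the patch:

// Hypothetical RAII sketch of the pattern above: flags apply for the
// lifetime of the guard and are cleared on scope exit.
class ScopedGCFlags {
 public:
  ScopedGCFlags(Heap* heap, int flags) : heap_(heap) {
    heap_->set_current_gc_flags(flags);
  }
  ~ScopedGCFlags() { heap_->set_current_gc_flags(Heap::kNoGCFlags); }

 private:
  Heap* heap_;
};
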
@@ -822,8 +823,7 @@ void Heap::CollectAllAvailableGarbage(const char* gc_reason) {
     isolate()->optimizing_compile_dispatcher()->Flush();
   }
   isolate()->ClearSerializerData();
-  mark_compact_collector()->SetFlags(kMakeHeapIterableMask |
-                                     kReduceMemoryFootprintMask);
+  set_current_gc_flags(kMakeHeapIterableMask | kReduceMemoryFootprintMask);
   isolate_->compilation_cache()->Clear();
   const int kMaxNumberOfAttempts = 7;
   const int kMinNumberOfAttempts = 2;
@@ -834,7 +834,7 @@ void Heap::CollectAllAvailableGarbage(const char* gc_reason) {
       break;
     }
   }
-  mark_compact_collector()->SetFlags(kNoGCFlags);
+  set_current_gc_flags(kNoGCFlags);
   new_space_.Shrink();
   UncommitFromSpace();
 }
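
For context, the break; in the hunk above sits inside CollectAllAvailableGarbage's retry loop, which falls outside the diff context. Reconstructed from the constants shown (a sketch, not copied from the patch), it presumably has this shape:

// Presumed surrounding loop: retry full GCs until a collection frees
// nothing more, but always run at least kMinNumberOfAttempts of them.
for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
  if (!CollectGarbage(MARK_COMPACTOR, gc_reason, NULL) &&
      attempt + 1 >= kMinNumberOfAttempts) {
    break;
  }
}
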
@@ -882,10 +882,8 @@ bool Heap::CollectGarbage(GarbageCollector collector, const char* gc_reason,
     }
   }
 
-  if (collector == MARK_COMPACTOR &&
-      !mark_compact_collector()->finalize_incremental_marking() &&
-      !mark_compact_collector()->abort_incremental_marking() &&
-      !incremental_marking()->IsStopped() &&
+  if (collector == MARK_COMPACTOR && !ShouldFinalizeIncrementalMarking() &&
+      !ShouldAbortIncrementalMarking() && !incremental_marking()->IsStopped() &&
       !incremental_marking()->should_hurry() && FLAG_incremental_marking) {
     // Make progress in incremental marking.
     const intptr_t kStepSizeWhenDelayedByScavenge = 1 * MB;
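
The rewritten condition reads the flag checks off Heap itself instead of off cached collector state. The two new predicates are declared in heap.h (not in this diff); presumably they test current_gc_flags_ against the existing mask constants:

// Hypothetical heap.h predicates; mask names assumed to match the
// existing Heap GC-flag constants.
bool Heap::ShouldFinalizeIncrementalMarking() const {
  return (current_gc_flags_ & kFinalizeIncrementalMarkingMask) != 0;
}

bool Heap::ShouldAbortIncrementalMarking() const {
  return (current_gc_flags_ & kAbortIncrementalMarkingMask) != 0;
}
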
@@ -956,8 +954,7 @@ bool Heap::CollectGarbage(GarbageCollector collector, const char* gc_reason,
 
   // Start incremental marking for the next cycle. The heap snapshot
   // generator needs incremental marking to stay off after it aborted.
-  if (!mark_compact_collector()->abort_incremental_marking() &&
-      incremental_marking()->IsStopped() &&
+  if (!ShouldAbortIncrementalMarking() && incremental_marking()->IsStopped() &&
       incremental_marking()->ShouldActivateEvenWithoutIdleNotification()) {
     incremental_marking()->Start(kNoGCFlags, kNoGCCallbackFlags, "GC epilogue");
   }
@@ -5664,8 +5661,7 @@
     factor = Min(factor, kConservativeHeapGrowingFactor);
   }
 
-  if (FLAG_stress_compaction ||
-      mark_compact_collector()->reduce_memory_footprint_) {
+  if (FLAG_stress_compaction || ShouldReduceMemory()) {
    factor = kMinHeapGrowingFactor;
   }
 