Index: src/heap.cc |
diff --git a/src/heap.cc b/src/heap.cc |
index 443c926d95b9eff79e26e56a0294564174b5473c..3c3716acfdc2faa7063806166dfe2bd5f8d54ad9 100644 |
--- a/src/heap.cc |
+++ b/src/heap.cc |
@@ -55,7 +55,6 @@ namespace internal { |
String* Heap::hidden_symbol_; |
Object* Heap::roots_[Heap::kRootListLength]; |
- |
NewSpace Heap::new_space_; |
OldSpace* Heap::old_pointer_space_ = NULL; |
OldSpace* Heap::old_data_space_ = NULL; |
@@ -64,9 +63,6 @@ MapSpace* Heap::map_space_ = NULL; |
CellSpace* Heap::cell_space_ = NULL; |
LargeObjectSpace* Heap::lo_space_ = NULL; |
-static const int kMinimumPromotionLimit = 2*MB; |
-static const int kMinimumAllocationLimit = 8*MB; |
- |
int Heap::old_gen_promotion_limit_ = kMinimumPromotionLimit; |
int Heap::old_gen_allocation_limit_ = kMinimumAllocationLimit; |
@@ -405,17 +401,26 @@ void Heap::GarbageCollectionEpilogue() { |
} |
-void Heap::CollectAllGarbage(bool force_compaction) { |
+void Heap::CollectAllGarbage(bool force_compaction, |
+ CollectionPolicy collectionPolicy) { |
// Since we are ignoring the return value, the exact choice of space does |
// not matter, so long as we do not specify NEW_SPACE, which would not |
// cause a full GC. |
MarkCompactCollector::SetForceCompaction(force_compaction); |
- CollectGarbage(0, OLD_POINTER_SPACE); |
+ CollectGarbage(0, OLD_POINTER_SPACE, collectionPolicy); |
MarkCompactCollector::SetForceCompaction(false); |
} |
-bool Heap::CollectGarbage(int requested_size, AllocationSpace space) { |
+void Heap::CollectAllAvailableGarbage() { |
+ CompilationCache::Clear(); |
+ CollectAllGarbage(true, AGGRESSIVE); |
+} |
+ |
+ |
+bool Heap::CollectGarbage(int requested_size, |
+ AllocationSpace space, |
+ CollectionPolicy collectionPolicy) { |
// The VM is in the GC state until exiting this function. |
VMState state(GC); |
@@ -442,7 +447,7 @@ bool Heap::CollectGarbage(int requested_size, AllocationSpace space) { |
? &Counters::gc_scavenger |
: &Counters::gc_compactor; |
rate->Start(); |
- PerformGarbageCollection(space, collector, &tracer); |
+ PerformGarbageCollection(collector, &tracer, collectionPolicy); |
rate->Stop(); |
GarbageCollectionEpilogue(); |
@@ -475,7 +480,7 @@ bool Heap::CollectGarbage(int requested_size, AllocationSpace space) { |
void Heap::PerformScavenge() { |
GCTracer tracer; |
- PerformGarbageCollection(NEW_SPACE, SCAVENGER, &tracer); |
+ PerformGarbageCollection(SCAVENGER, &tracer, NORMAL); |
} |
@@ -664,9 +669,9 @@ void Heap::UpdateSurvivalRateTrend(int start_new_space_size) { |
survival_rate_ = survival_rate; |
} |
-void Heap::PerformGarbageCollection(AllocationSpace space, |
- GarbageCollector collector, |
- GCTracer* tracer) { |
+void Heap::PerformGarbageCollection(GarbageCollector collector, |
+ GCTracer* tracer, |
+ CollectionPolicy collectionPolicy) { |
VerifySymbolTable(); |
if (collector == MARK_COMPACTOR && global_gc_prologue_callback_) { |
ASSERT(!allocation_allowed_); |
@@ -696,25 +701,44 @@ void Heap::PerformGarbageCollection(AllocationSpace space, |
UpdateSurvivalRateTrend(start_new_space_size); |
- int old_gen_size = PromotedSpaceSize(); |
- old_gen_promotion_limit_ = |
- old_gen_size + Max(kMinimumPromotionLimit, old_gen_size / 3); |
- old_gen_allocation_limit_ = |
- old_gen_size + Max(kMinimumAllocationLimit, old_gen_size / 2); |
- |
- if (high_survival_rate_during_scavenges && |
- IsStableOrIncreasingSurvivalTrend()) { |
- // Stable high survival rates of young objects both during partial and |
- // full collection indicate that mutator is either building or modifying |
- // a structure with a long lifetime. |
- // In this case we aggressively raise old generation memory limits to |
- // postpone subsequent mark-sweep collection and thus trade memory |
- // space for the mutation speed. |
- old_gen_promotion_limit_ *= 2; |
- old_gen_allocation_limit_ *= 2; |
+ UpdateOldSpaceLimits(); |
+ |
+ // Major GC would invoke weak handle callbacks on weakly reachable |
+  // handles, but won't collect weakly reachable objects until the next |
+  // major GC. Therefore if we collect aggressively and a weak handle callback |
+ // has been invoked, we rerun major GC to release objects which become |
+ // garbage. |
+ if (collectionPolicy == AGGRESSIVE) { |
+ // Note: as weak callbacks can execute arbitrary code, we cannot |
+    // hope that eventually there will be no weak callback invocations. |
+ // Therefore stop recollecting after several attempts. |
+ const int kMaxNumberOfAttempts = 7; |
+ for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) { |
+ { DisableAssertNoAllocation allow_allocation; |
+ GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); |
+ if (!GlobalHandles::PostGarbageCollectionProcessing()) break; |
+ } |
+ MarkCompact(tracer); |
+ // Weak handle callbacks can allocate data, so keep limits correct. |
+ UpdateOldSpaceLimits(); |
+ } |
+ } else { |
+ if (high_survival_rate_during_scavenges && |
+ IsStableOrIncreasingSurvivalTrend()) { |
+ // Stable high survival rates of young objects both during partial and |
+ // full collection indicate that mutator is either building or modifying |
+ // a structure with a long lifetime. |
+ // In this case we aggressively raise old generation memory limits to |
+ // postpone subsequent mark-sweep collection and thus trade memory |
+ // space for the mutation speed. |
+ old_gen_promotion_limit_ *= 2; |
+ old_gen_allocation_limit_ *= 2; |
+ } |
+ } |
+ { DisableAssertNoAllocation allow_allocation; |
[Inline review comment — Vyacheslav Egorov (Chromium), 2010/09/14 17:08:42: "Empty line before this one might help readability." — reviewer annotation from the code-review tool; not part of the patch itself]
|
+ GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); |
+ GlobalHandles::PostGarbageCollectionProcessing(); |
} |
- |
- old_gen_exhausted_ = false; |
} else { |
tracer_ = tracer; |
Scavenge(); |
@@ -725,12 +749,6 @@ void Heap::PerformGarbageCollection(AllocationSpace space, |
Counters::objs_since_last_young.Set(0); |
- if (collector == MARK_COMPACTOR) { |
- DisableAssertNoAllocation allow_allocation; |
- GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); |
- GlobalHandles::PostGarbageCollectionProcessing(); |
- } |
- |
// Update relocatables. |
Relocatable::PostGarbageCollectionProcessing(); |