Chromium Code Reviews — Index: runtime/vm/pages.cc
| diff --git a/runtime/vm/pages.cc b/runtime/vm/pages.cc |
| index 2f894b53a2bfc9ed7c7710aed1d831533190203f..f96f737d7fe7e923d8141615fef2b5b9c514cf04 100644 |
| --- a/runtime/vm/pages.cc |
| +++ b/runtime/vm/pages.cc |
| @@ -1166,7 +1166,8 @@ void PageSpaceController::EvaluateGarbageCollection( |
| before.used_in_words - last_usage_.used_in_words; |
| intptr_t garbage = before.used_in_words - after.used_in_words; |
|
zra
2016/08/08 14:59:37
const
ASSERT(garbage >= 0);
siva
2016/08/11 17:32:48
Done.
|
| double k = garbage / static_cast<double>(allocated_since_previous_gc); |
|
zra
2016/08/08 14:59:37
const
siva
2016/08/11 17:32:48
Done.
|
| - heap_->RecordData(PageSpace::kGarbageRatio, static_cast<int>(k * 100)); |
| + int garbage_ratio = static_cast<int>(k * 100); |
|
zra
2016/08/08 14:59:37
const intptr_t
siva
2016/08/11 17:32:48
Added const; a change to intptr_t would be coupled with other changes, so leaving the type as is for now.
|
| + heap_->RecordData(PageSpace::kGarbageRatio, garbage_ratio); |
| // Define GC to be 'worthwhile' iff at least fraction t of heap is garbage. |
| double t = 1.0 - desired_utilization_; |
| @@ -1175,16 +1176,39 @@ void PageSpaceController::EvaluateGarbageCollection( |
| t += (gc_time_fraction - garbage_collection_time_ratio_) / 100.0; |
| } |
| - // Find minimum 'grow_heap_' such that after increasing capacity by |
| - // 'grow_heap_' pages and filling them, we expect a GC to be worthwhile. |
| - for (grow_heap_ = 0; grow_heap_ < heap_growth_max_; ++grow_heap_) { |
| - intptr_t limit = |
| - after.capacity_in_words + (grow_heap_ * PageSpace::kPageSizeInWords); |
| - intptr_t allocated_before_next_gc = limit - after.used_in_words; |
| - double estimated_garbage = k * allocated_before_next_gc; |
| - if (t <= estimated_garbage / limit) { |
| - break; |
| + if (garbage_ratio == 0) { |
| + // No garbage in the previous cycle so it would be hard to compute a |
| + // grow_heap_ size based on estimated garbage so we use growth ratio |
| + // instead. |
| + intptr_t grow_ratio = |
|
zra
2016/08/08 14:59:37
const
siva
2016/08/11 17:32:48
Done.
|
| + (static_cast<intptr_t>(after.capacity_in_words / t) - |
| + after.capacity_in_words) / PageSpace::kPageSizeInWords; |
| + grow_heap_ = heap_growth_max_; |
| + grow_heap_ = Utils::Maximum(grow_heap_, grow_ratio); |
|
zra
2016/08/08 14:59:37
grow_heap_ = Utils::Maximum(heap_growth_max_, grow_ratio);
siva
2016/08/11 17:32:48
Done but had to introduce a static_cast as heap_growth_max_ is declared as an int. (NOTE(review): exact tail of this reply was truncated in the capture — reconstructed from context; verify against the original review thread.)
|
| + } else { |
| + // Find minimum 'grow_heap_' such that after increasing capacity by |
| + // 'grow_heap_' pages and filling them, we expect a GC to be worthwhile. |
| + intptr_t max = heap_growth_max_; |
| + intptr_t min = 0; |
| + intptr_t adjustment = 0; |
| + // Find minimum 'grow_heap_' such that after increasing capacity by |
|
zra
2016/08/08 14:59:37
Duplicated comment
siva
2016/08/11 17:32:48
Removed duplicate.
|
| + // 'grow_heap_' pages and filling them, we expect a GC to be worthwhile. |
| + grow_heap_ = 0; |
| + while (min < max) { |
| + grow_heap_ = (max + min) / 2; |
| + intptr_t limit = |
|
zra
2016/08/08 14:59:37
const
siva
2016/08/11 17:32:48
Done.
|
| + after.capacity_in_words + (grow_heap_ * PageSpace::kPageSizeInWords); |
| + intptr_t allocated_before_next_gc = limit - after.used_in_words; |
|
zra
2016/08/08 14:59:37
const
siva
2016/08/11 17:32:48
Done.
|
| + double estimated_garbage = k * allocated_before_next_gc; |
|
zra
2016/08/08 14:59:37
const
siva
2016/08/11 17:32:48
Done.
|
| + if (t <= estimated_garbage / limit) { |
| + max = grow_heap_ - 1; |
| + adjustment = -1; |
| + } else { |
| + min = grow_heap_ + 1; |
| + adjustment = 1; |
| + } |
| } |
| + grow_heap_ += adjustment; |
| } |
| heap_->RecordData(PageSpace::kPageGrowth, grow_heap_); |