Index: src/heap/heap.cc
diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index 361b4d91d7ffbdbf128fb05f6dde00e27f84e0a7..e1d01f5423b821427d2fc5e0b3cb3c61f4ca8344 100644
--- a/src/heap/heap.cc
+++ b/src/heap/heap.cc
@@ -530,7 +530,8 @@ void Heap::RepairFreeListsAfterDeserialization() {
 }
 
 
-void Heap::ProcessPretenuringFeedback() {
+bool Heap::ProcessPretenuringFeedback() {
+  bool trigger_deoptimization = false;
   if (FLAG_allocation_site_pretenuring) {
     int tenure_decisions = 0;
     int dont_tenure_decisions = 0;
@@ -551,7 +552,6 @@ void Heap::ProcessPretenuringFeedback() {
 
     int i = 0;
     Object* list_element = allocation_sites_list();
-    bool trigger_deoptimization = false;
     bool maximum_size_scavenge = MaximumSizeScavenge();
     while (use_scratchpad ? i < allocation_sites_scratchpad_length_
                           : list_element->IsAllocationSite()) {
@@ -603,6 +603,7 @@ void Heap::ProcessPretenuringFeedback() {
              dont_tenure_decisions);
     }
   }
+  return trigger_deoptimization;
 }
 
 
@@ -632,9 +633,6 @@ void Heap::GarbageCollectionEpilogue() {
     ZapFromSpace();
   }
 
-  // Process pretenuring feedback and update allocation sites.
-  ProcessPretenuringFeedback();
-
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap) {
     Verify();
@@ -1250,9 +1248,14 @@ bool Heap::PerformGarbageCollection(
     Scavenge();
   }
 
-
+  bool deopted = ProcessPretenuringFeedback();
   UpdateSurvivalStatistics(start_new_space_size);
-  ConfigureNewGenerationSize();
+
+  // When pretenuring is collecting new feedback, we do not shrink the new
+  // space right away.
+  if (!deopted) {
+    ConfigureNewGenerationSize();
+  }
   ConfigureInitialOldGenerationSize();
 
   isolate_->counters()->objs_since_last_young()->Set(0);
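
For reference, the control flow this patch establishes can be summarized outside of V8. The stub below is a minimal, self-contained sketch, not V8 code: the HeapSketch class and its site_decision_changed_ data are invented for illustration; only the idea that ProcessPretenuringFeedback() now reports whether it triggered a deoptimization, and that the caller skips ConfigureNewGenerationSize() in that case, comes from the diff above.

// Minimal sketch (not V8 code) of the control flow introduced above:
// pretenuring feedback processing reports whether it had to deoptimize,
// and the caller skips shrinking the new space in that case so the next
// GC cycle can gather fresh feedback at the current new-space size.
#include <cstdio>
#include <vector>

class HeapSketch {  // hypothetical stand-in for v8::internal::Heap
 public:
  // Mirrors the new signature: true if any allocation-site tenuring
  // decision changed in a way that required deoptimizing dependent code.
  bool ProcessPretenuringFeedback() {
    bool trigger_deoptimization = false;
    for (bool changed : site_decision_changed_) {
      if (changed) trigger_deoptimization = true;
    }
    return trigger_deoptimization;
  }

  void PerformGarbageCollection() {
    bool deopted = ProcessPretenuringFeedback();
    // As in the patch: only (possibly) shrink the new space when no
    // pretenuring decision was invalidated during this cycle.
    if (!deopted) {
      ConfigureNewGenerationSize();
    }
  }

  void ConfigureNewGenerationSize() { std::printf("new space resized\n"); }

  // Fake per-site "decision changed" flags, standing in for the real
  // allocation-site feedback; purely illustrative.
  std::vector<bool> site_decision_changed_{false, true, false};
};

int main() {
  HeapSketch heap;
  heap.PerformGarbageCollection();  // a decision changed: no resize
  heap.site_decision_changed_ = {false, false};
  heap.PerformGarbageCollection();  // prints "new space resized"
  return 0;
}

Keeping the resize off the deoptimization path means the new space is not shrunk on the basis of feedback that is about to be invalidated, which is exactly what the added comment in PerformGarbageCollection states.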