| OLD | NEW |
|---|---|
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 503 matching lines...) | |
| 514 int allocation_sites = 0; | 514 int allocation_sites = 0; |
| 515 int active_allocation_sites = 0; | 515 int active_allocation_sites = 0; |
| 516 | 516 |
| 517 // If the scratchpad overflowed, we have to iterate over the allocation | 517 // If the scratchpad overflowed, we have to iterate over the allocation |
| 518 // sites list. | 518 // sites list. |
| 519 bool use_scratchpad = | 519 bool use_scratchpad = |
| 520 allocation_sites_scratchpad_length < kAllocationSiteScratchpadSize; | 520 allocation_sites_scratchpad_length < kAllocationSiteScratchpadSize; |
| 521 | 521 |
| 522 int i = 0; | 522 int i = 0; |
| 523 Object* list_element = allocation_sites_list(); | 523 Object* list_element = allocation_sites_list(); |
| | 524 bool trigger_deoptimization = false; |
| 524 while (use_scratchpad ? | 525 while (use_scratchpad ? |
| 525 i < allocation_sites_scratchpad_length : | 526 i < allocation_sites_scratchpad_length : |
| 526 list_element->IsAllocationSite()) { | 527 list_element->IsAllocationSite()) { |
| 527 AllocationSite* site = use_scratchpad ? | 528 AllocationSite* site = use_scratchpad ? |
| 528 allocation_sites_scratchpad[i] : AllocationSite::cast(list_element); | 529 allocation_sites_scratchpad[i] : AllocationSite::cast(list_element); |
| 529 allocation_mementos_found += site->memento_found_count(); | 530 allocation_mementos_found += site->memento_found_count(); |
| 530 if (site->memento_found_count() > 0) { | 531 if (site->memento_found_count() > 0) { |
| 531 active_allocation_sites++; | 532 active_allocation_sites++; |
| 532 } | 533 } |
| 533 if (site->DigestPretenuringFeedback()) { | 534 if (site->DigestPretenuringFeedback()) trigger_deoptimization = true; |
| 534 if (site->GetPretenureMode() == TENURED) { | 535 if (site->GetPretenureMode() == TENURED) { |

Benedikt Meurer (2014/01/16 11:19:37): Shouldn't this if still depend on the previous one?

Hannes Payer (out of office) (2014/01/16 11:41:20): The semantics of DigestPretenuringFeedback changed.

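The thread above turns on what the return value of DigestPretenuringFeedback() now means. Below is a minimal, self-contained sketch of the new control flow using hypothetical stand-ins (AllocationSiteStub, ProcessPretenuringFeedback, the printf in place of the stack-guard deopt request are not the real V8 declarations): the tenure/don't-tenure counters are bumped for every visited site, while the return value only latches whether dependent code must be deoptimized after the loop.

```cpp
// Hypothetical stand-ins, not V8's real types: this only models the
// control flow under discussion in the patch.
#include <cstdio>
#include <vector>

enum PretenureMode { NOT_TENURED, TENURED };

struct AllocationSiteStub {
  PretenureMode mode;
  bool decision_changed;
  // New semantics: the return value means "dependent code must be
  // deoptimized", not "a pretenuring decision was just made".
  bool DigestPretenuringFeedback() { return decision_changed; }
  PretenureMode GetPretenureMode() const { return mode; }
};

void ProcessPretenuringFeedback(std::vector<AllocationSiteStub>& sites) {
  int tenure_decisions = 0;
  int dont_tenure_decisions = 0;
  bool trigger_deoptimization = false;
  for (AllocationSiteStub& site : sites) {
    // The deopt request is latched independently of the counters ...
    if (site.DigestPretenuringFeedback()) trigger_deoptimization = true;
    // ... and the counters are now updated for every visited site, which is
    // why the second `if` no longer nests inside the first one.
    if (site.GetPretenureMode() == TENURED) {
      tenure_decisions++;
    } else {
      dont_tenure_decisions++;
    }
  }
  // Stand-in for isolate_->stack_guard()->DeoptMarkedCode(): request the
  // deoptimization instead of performing it here.
  if (trigger_deoptimization) std::printf("deopt of marked code requested\n");
  std::printf("tenured: %d, not tenured: %d\n", tenure_decisions,
              dont_tenure_decisions);
}

int main() {
  std::vector<AllocationSiteStub> sites = {
      {TENURED, true}, {NOT_TENURED, false}, {TENURED, false}};
  ProcessPretenuringFeedback(sites);
  return 0;
}
```

Routing the request through the stack guard rather than calling Deoptimizer::DeoptimizeMarkedCode directly (as the change at old line 2001 also does) presumably defers the actual deoptimization until JavaScript execution resumes instead of doing it during the GC pause.
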
| 535 tenure_decisions++; | 536 tenure_decisions++; |
| 536 } else { | 537 } else { |
| 537 dont_tenure_decisions++; | 538 dont_tenure_decisions++; |
| 538 } | |
| 539 } | 539 } |
| 540 allocation_sites++; | 540 allocation_sites++; |
| 541 if (use_scratchpad) { | 541 if (use_scratchpad) { |
| 542 i++; | 542 i++; |
| 543 } else { | 543 } else { |
| 544 list_element = site->weak_next(); | 544 list_element = site->weak_next(); |
| 545 } | 545 } |
| 546 } | 546 } |
| | 547 |
| | 548 if (trigger_deoptimization) isolate_->stack_guard()->DeoptMarkedCode(); |
| | 549 |
| 547 allocation_sites_scratchpad_length = 0; | 550 allocation_sites_scratchpad_length = 0; |
| 548 | 551 |
| 549 // TODO(mvstanton): Pretenure decisions are only made once for an allocation | 552 // TODO(mvstanton): Pretenure decisions are only made once for an allocation |
| 550 // site. Find a sane way to decide about revisiting the decision later. | 553 // site. Find a sane way to decide about revisiting the decision later. |
| 551 | 554 |
| 552 if (FLAG_trace_track_allocation_sites && | 555 if (FLAG_trace_track_allocation_sites && |
| 553 (allocation_mementos_found > 0 || | 556 (allocation_mementos_found > 0 || |
| 554 tenure_decisions > 0 || | 557 tenure_decisions > 0 || |
| 555 dont_tenure_decisions > 0)) { | 558 dont_tenure_decisions > 0)) { |
| 556 PrintF("GC: (mode, #visited allocation sites, #active allocation sites, " | 559 PrintF("GC: (mode, #visited allocation sites, #active allocation sites, " |
| (...skipping 1434 matching lines...) | |
| 1991 AllocationSite* casted = AllocationSite::cast(cur); | 1994 AllocationSite* casted = AllocationSite::cast(cur); |
| 1992 if (casted->GetPretenureMode() == flag) { | 1995 if (casted->GetPretenureMode() == flag) { |
| 1993 casted->ResetPretenureDecision(); | 1996 casted->ResetPretenureDecision(); |
| 1994 bool got_marked = casted->dependent_code()->MarkCodeForDeoptimization( | 1997 bool got_marked = casted->dependent_code()->MarkCodeForDeoptimization( |
| 1995 isolate_, | 1998 isolate_, |
| 1996 DependentCode::kAllocationSiteTenuringChangedGroup); | 1999 DependentCode::kAllocationSiteTenuringChangedGroup); |
| 1997 if (got_marked) marked = true; | 2000 if (got_marked) marked = true; |
| 1998 } | 2001 } |
| 1999 cur = casted->weak_next(); | 2002 cur = casted->weak_next(); |
| 2000 } | 2003 } |
| 2001 if (marked) Deoptimizer::DeoptimizeMarkedCode(isolate_); | 2004 if (marked) isolate_->stack_guard()->DeoptMarkedCode(); |
| 2002 } | 2005 } |
| 2003 | 2006 |
| 2004 | 2007 |
| 2005 void Heap::EvaluateOldSpaceLocalPretenuring( | 2008 void Heap::EvaluateOldSpaceLocalPretenuring( |
| 2006 uint64_t size_of_objects_before_gc) { | 2009 uint64_t size_of_objects_before_gc) { |
| 2007 uint64_t size_of_objects_after_gc = SizeOfObjects(); | 2010 uint64_t size_of_objects_after_gc = SizeOfObjects(); |
| 2008 double old_generation_survival_rate = | 2011 double old_generation_survival_rate = |
| 2009 (static_cast<double>(size_of_objects_after_gc) * 100) / | 2012 (static_cast<double>(size_of_objects_after_gc) * 100) / |
| 2010 static_cast<double>(size_of_objects_before_gc); | 2013 static_cast<double>(size_of_objects_before_gc); |
| 2011 | 2014 |
| (...skipping 5785 matching lines...) | |
| 7797 static_cast<int>(object_sizes_last_time_[index])); | 7800 static_cast<int>(object_sizes_last_time_[index])); |
| 7798 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) | 7801 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) |
| 7799 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 7802 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
| 7800 | 7803 |
| 7801 OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 7804 OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
| 7802 OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 7805 OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
| 7803 ClearObjectStats(); | 7806 ClearObjectStats(); |
| 7804 } | 7807 } |
| 7805 | 7808 |
| 7806 } } // namespace v8::internal | 7809 } } // namespace v8::internal |