OLD | NEW |
---|---|
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 527 matching lines...) | |
538 dont_tenure_decisions++; | 538 dont_tenure_decisions++; |
539 } | 539 } |
540 allocation_sites++; | 540 allocation_sites++; |
541 if (use_scratchpad) { | 541 if (use_scratchpad) { |
542 i++; | 542 i++; |
543 } else { | 543 } else { |
544 list_element = site->weak_next(); | 544 list_element = site->weak_next(); |
545 } | 545 } |
546 } | 546 } |
547 | 547 |
548 if (trigger_deoptimization) isolate_->stack_guard()->DeoptMarkedCode(); | 548 if (trigger_deoptimization) { |
549 isolate_->stack_guard()->DeoptMarkedAllocationSites(); | |
550 } | |
549 | 551 |
550 FlushAllocationSitesScratchpad(); | 552 FlushAllocationSitesScratchpad(); |
551 | 553 |
552 if (FLAG_trace_pretenuring_statistics && | 554 if (FLAG_trace_pretenuring_statistics && |
553 (allocation_mementos_found > 0 || | 555 (allocation_mementos_found > 0 || |
554 tenure_decisions > 0 || | 556 tenure_decisions > 0 || |
555 dont_tenure_decisions > 0)) { | 557 dont_tenure_decisions > 0)) { |
556 PrintF("GC: (mode, #visited allocation sites, #active allocation sites, " | 558 PrintF("GC: (mode, #visited allocation sites, #active allocation sites, " |
557 "#mementos, #tenure decisions, #donttenure decisions) " | 559 "#mementos, #tenure decisions, #donttenure decisions) " |
558 "(%s, %d, %d, %d, %d, %d)\n", | 560 "(%s, %d, %d, %d, %d, %d)\n", |
559 use_scratchpad ? "use scratchpad" : "use list", | 561 use_scratchpad ? "use scratchpad" : "use list", |
560 allocation_sites, | 562 allocation_sites, |
561 active_allocation_sites, | 563 active_allocation_sites, |
562 allocation_mementos_found, | 564 allocation_mementos_found, |
563 tenure_decisions, | 565 tenure_decisions, |
564 dont_tenure_decisions); | 566 dont_tenure_decisions); |
565 } | 567 } |
566 } | 568 } |
567 } | 569 } |
568 | 570 |
569 | 571 |
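The hunk above replaces the blanket DeoptMarkedCode() stack-guard request with a dedicated DeoptMarkedAllocationSites() interrupt. A minimal sketch of that request-then-handle pattern follows; StackGuardSketch and HandleInterrupts are hypothetical names for illustration, not V8's actual stack-guard API. The point of the indirection is that the GC epilogue stays cheap: it only sets a bit, and the expensive deoptimization pass runs later on the main thread at a safe point.

```cpp
// Sketch of the stack-guard deferral pattern, under assumed names.
#include <atomic>
#include <cstdio>

class StackGuardSketch {
 public:
  // Cheap to call from the GC epilogue: just records pending work.
  void RequestDeoptMarkedAllocationSites() {
    deopt_pending_.store(true, std::memory_order_release);
  }

  // Called on the main thread at a safe point; runs the deferred
  // pass at most once per request.
  template <typename Callback>
  void HandleInterrupts(Callback deopt_marked_allocation_sites) {
    if (deopt_pending_.exchange(false, std::memory_order_acq_rel)) {
      deopt_marked_allocation_sites();
    }
  }

 private:
  std::atomic<bool> deopt_pending_{false};
};

int main() {
  StackGuardSketch guard;
  guard.RequestDeoptMarkedAllocationSites();  // e.g. from the GC epilogue
  guard.HandleInterrupts([] { std::printf("deopt marked allocation sites\n"); });
  return 0;
}
```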
572 void Heap::DeoptMarkedAllocationSites() { | |
573 // TODO(hpayer): If iterating over the alloation sites list becomes a | |
mvstanton 2014/02/17 12:10:06: "allocation"
Hannes Payer (out of office) 2014/02/17 12:14:43: Done.
574 // performance issue, use a cache heap data structure instead (similar to the | |
575 // allocation sites scratchpad). | |
576 Object* list_element = allocation_sites_list(); | |
577 while (list_element->IsAllocationSite()) { | |
578 AllocationSite* site = AllocationSite::cast(list_element); | |
579 if (site->deopt_dependent_code()) { | |
580 site->dependent_code()->MarkCodeForDeoptimization( | |
581 isolate_, | |
582 DependentCode::kAllocationSiteTenuringChangedGroup); | |
583 site->set_deopt_dependent_code(false); | |
584 } | |
585 list_element = site->weak_next(); | |
586 } | |
587 Deoptimizer::DeoptimizeMarkedCode(isolate_); | |
588 } | |
589 | |
590 | |
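The new Heap::DeoptMarkedAllocationSites() above walks the weak allocation-sites list, marks the dependent code of every site flagged with deopt_dependent_code, clears the flag, and finishes with a single Deoptimizer::DeoptimizeMarkedCode() call. A self-contained sketch of that consumer side; SiteSketch and DeoptMarkedAllocationSitesSketch are stand-ins, not V8 types:

```cpp
// Sketch of the flag-consuming list walk, under assumed names.
#include <cstdio>

struct SiteSketch {
  bool deopt_dependent_code = false;  // set by GC, consumed here
  SiteSketch* weak_next = nullptr;    // weak list link
};

void DeoptMarkedAllocationSitesSketch(SiteSketch* head) {
  for (SiteSketch* site = head; site != nullptr; site = site->weak_next) {
    if (site->deopt_dependent_code) {
      // In V8 this is MarkCodeForDeoptimization() on the site's
      // dependent code; here we just log.
      std::printf("marking dependent code of site %p\n",
                  static_cast<void*>(site));
      site->deopt_dependent_code = false;  // one-shot flag
    }
  }
  // V8 then runs a single Deoptimizer::DeoptimizeMarkedCode(isolate_)
  // after the whole list has been visited.
}

int main() {
  SiteSketch a, b, c;
  a.weak_next = &b;
  b.weak_next = &c;
  b.deopt_dependent_code = true;
  DeoptMarkedAllocationSitesSketch(&a);
  return 0;
}
```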
570 void Heap::GarbageCollectionEpilogue() { | 591 void Heap::GarbageCollectionEpilogue() { |
571 store_buffer()->GCEpilogue(); | 592 store_buffer()->GCEpilogue(); |
572 | 593 |
573 // In release mode, we only zap the from space under heap verification. | 594 // In release mode, we only zap the from space under heap verification. |
574 if (Heap::ShouldZapGarbage()) { | 595 if (Heap::ShouldZapGarbage()) { |
575 ZapFromSpace(); | 596 ZapFromSpace(); |
576 } | 597 } |
577 | 598 |
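For context, ZapFromSpace() overwrites the evacuated semispace with a sentinel value so that stale pointers into from-space fail loudly when heap verification is enabled. A sketch of the idea; kZapSentinel is an illustrative constant, not V8's actual zap value:

```cpp
// Sketch of memory "zapping": fill memory that must no longer be read
// with a recognizable pattern so bugs surface deterministically.
#include <cstdint>
#include <cstdio>
#include <vector>

constexpr uint32_t kZapSentinel = 0xdeadbeef;  // illustrative value

void ZapRange(uint32_t* begin, uint32_t* end) {
  for (uint32_t* p = begin; p != end; ++p) *p = kZapSentinel;
}

int main() {
  std::vector<uint32_t> from_space(8, 42);  // pretend this was evacuated
  ZapRange(from_space.data(), from_space.data() + from_space.size());
  std::printf("first word after zap: 0x%x\n", from_space[0]);
  return 0;
}
```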
578 // Process pretenuring feedback and update allocation sites. | 599 // Process pretenuring feedback and update allocation sites. |
579 ProcessPretenuringFeedback(); | 600 ProcessPretenuringFeedback(); |
(...skipping 1413 matching lines...) | |
1993 | 2014 |
1994 | 2015 |
1995 void Heap::ResetAllAllocationSitesDependentCode(PretenureFlag flag) { | 2016 void Heap::ResetAllAllocationSitesDependentCode(PretenureFlag flag) { |
1996 DisallowHeapAllocation no_allocation_scope; | 2017 DisallowHeapAllocation no_allocation_scope; |
1997 Object* cur = allocation_sites_list(); | 2018 Object* cur = allocation_sites_list(); |
1998 bool marked = false; | 2019 bool marked = false; |
1999 while (cur->IsAllocationSite()) { | 2020 while (cur->IsAllocationSite()) { |
2000 AllocationSite* casted = AllocationSite::cast(cur); | 2021 AllocationSite* casted = AllocationSite::cast(cur); |
2001 if (casted->GetPretenureMode() == flag) { | 2022 if (casted->GetPretenureMode() == flag) { |
2002 casted->ResetPretenureDecision(); | 2023 casted->ResetPretenureDecision(); |
2003 bool got_marked = casted->dependent_code()->MarkCodeForDeoptimization( | 2024 casted->set_deopt_dependent_code(true); |
2004 isolate_, | 2025 marked = true; |
2005 DependentCode::kAllocationSiteTenuringChangedGroup); | |
2006 if (got_marked) marked = true; | |
2007 } | 2026 } |
2008 cur = casted->weak_next(); | 2027 cur = casted->weak_next(); |
2009 } | 2028 } |
2010 if (marked) isolate_->stack_guard()->DeoptMarkedCode(); | 2029 if (marked) isolate_->stack_guard()->DeoptMarkedAllocationSites(); |
2011 } | 2030 } |
2012 | 2031 |
2013 | 2032 |
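The rewritten ResetAllAllocationSitesDependentCode() above no longer calls MarkCodeForDeoptimization() during the list walk; it only sets the per-site deopt_dependent_code flag and, if anything matched, asks the stack guard for one deferred DeoptMarkedAllocationSites() pass. A hypothetical mirror of that producer side (Site and FlagSitesForDeopt are stand-ins); presumably this keeps the walk under DisallowHeapAllocation minimal and moves the actual code patching to a safe point:

```cpp
// Sketch of the flag-producing side, under assumed names.
#include <cstdio>

struct Site {
  int pretenure_mode = 0;             // stand-in for GetPretenureMode()
  bool deopt_dependent_code = false;  // consumed by the deferred pass
  Site* weak_next = nullptr;
};

bool FlagSitesForDeopt(Site* head, int mode) {
  bool marked = false;
  for (Site* s = head; s != nullptr; s = s->weak_next) {
    if (s->pretenure_mode == mode) {
      s->deopt_dependent_code = true;  // cheap: no code is patched here
      marked = true;
    }
  }
  return marked;
}

int main() {
  Site a, b;
  a.weak_next = &b;
  b.pretenure_mode = 1;
  if (FlagSitesForDeopt(&a, 1)) {
    std::printf("request DeoptMarkedAllocationSites at next safe point\n");
  }
  return 0;
}
```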
2014 void Heap::EvaluateOldSpaceLocalPretenuring( | 2033 void Heap::EvaluateOldSpaceLocalPretenuring( |
2015 uint64_t size_of_objects_before_gc) { | 2034 uint64_t size_of_objects_before_gc) { |
2016 uint64_t size_of_objects_after_gc = SizeOfObjects(); | 2035 uint64_t size_of_objects_after_gc = SizeOfObjects(); |
2017 double old_generation_survival_rate = | 2036 double old_generation_survival_rate = |
2018 (static_cast<double>(size_of_objects_after_gc) * 100) / | 2037 (static_cast<double>(size_of_objects_after_gc) * 100) / |
2019 static_cast<double>(size_of_objects_before_gc); | 2038 static_cast<double>(size_of_objects_before_gc); |
2020 | 2039 |
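old_generation_survival_rate above is simply the percentage of old-generation bytes that survived the collection, (size_after * 100) / size_before: for example, 80 MB of objects remaining after collecting a 100 MB old generation gives 80.0. A one-file check of the formula:

```cpp
// Worked example of the survival-rate computation above.
#include <cstdint>
#include <cstdio>

double SurvivalRate(uint64_t before, uint64_t after) {
  return (static_cast<double>(after) * 100) / static_cast<double>(before);
}

int main() {
  // 80 MB surviving out of 100 MB -> 80.0%.
  std::printf("%.1f%%\n", SurvivalRate(100ull << 20, 80ull << 20));
  return 0;
}
```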
(...skipping 5699 matching lines...) | |
7720 static_cast<int>(object_sizes_last_time_[index])); | 7739 static_cast<int>(object_sizes_last_time_[index])); |
7721 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) | 7740 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) |
7722 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 7741 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
7723 | 7742 |
7724 OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 7743 OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
7725 OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 7744 OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
7726 ClearObjectStats(); | 7745 ClearObjectStats(); |
7727 } | 7746 } |
7728 | 7747 |
7729 } } // namespace v8::internal | 7748 } } // namespace v8::internal |
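The final fragment checkpoints the per-type object statistics: the current counters are copied into the *_last_time_ arrays via OS::MemCopy(), then cleared for the next GC cycle by ClearObjectStats(). A minimal sketch of that snapshot-and-reset pattern, with illustrative names:

```cpp
// Sketch of the counter checkpoint pattern, under assumed names.
#include <cstddef>
#include <cstdio>
#include <cstring>

constexpr int kTypeCount = 4;  // illustrative; V8 tracks many more types

struct ObjectStats {
  std::size_t counts[kTypeCount] = {};
  std::size_t counts_last_time[kTypeCount] = {};

  void Checkpoint() {
    // OS::MemCopy() in V8; plain memcpy here.
    std::memcpy(counts_last_time, counts, sizeof(counts));
    std::memset(counts, 0, sizeof(counts));  // ClearObjectStats()
  }
};

int main() {
  ObjectStats stats;
  stats.counts[1] = 7;
  stats.Checkpoint();
  std::printf("last time: %zu, current: %zu\n",
              stats.counts_last_time[1], stats.counts[1]);
  return 0;
}
```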