| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 655 matching lines...) |
| 666 | 666 |
| 667 #if defined(DEBUG) | 667 #if defined(DEBUG) |
| 668 ReportStatisticsAfterGC(); | 668 ReportStatisticsAfterGC(); |
| 669 #endif // DEBUG | 669 #endif // DEBUG |
| 670 #ifdef ENABLE_DEBUGGER_SUPPORT | 670 #ifdef ENABLE_DEBUGGER_SUPPORT |
| 671 isolate_->debug()->AfterGarbageCollection(); | 671 isolate_->debug()->AfterGarbageCollection(); |
| 672 #endif // ENABLE_DEBUGGER_SUPPORT | 672 #endif // ENABLE_DEBUGGER_SUPPORT |
| 673 } | 673 } |
| 674 | 674 |
| 675 | 675 |
| 676 void Heap::CollectAllGarbage(int flags, const char* gc_reason) { | 676 void Heap::CollectAllGarbage(int flags, |
| | 677 const char* gc_reason, |
| | 678 const v8::GCCallbackFlags gc_callback_flags) { |
| 677 // Since we are ignoring the return value, the exact choice of space does | 679 // Since we are ignoring the return value, the exact choice of space does |
| 678 // not matter, so long as we do not specify NEW_SPACE, which would not | 680 // not matter, so long as we do not specify NEW_SPACE, which would not |
| 679 // cause a full GC. | 681 // cause a full GC. |
| 680 mark_compact_collector_.SetFlags(flags); | 682 mark_compact_collector_.SetFlags(flags); |
| 681 CollectGarbage(OLD_POINTER_SPACE, gc_reason); | 683 CollectGarbage(OLD_POINTER_SPACE, gc_reason, gc_callback_flags); |
| 682 mark_compact_collector_.SetFlags(kNoGCFlags); | 684 mark_compact_collector_.SetFlags(kNoGCFlags); |
| 683 } | 685 } |
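
Note on the hunk above: `CollectAllGarbage` now forwards a `v8::GCCallbackFlags` value down the collection path (`CollectGarbage` → `PerformGarbageCollection` → `CallGCEpilogueCallbacks`). A minimal caller sketch; the declaration is not in this hunk, so the assumption here is that heap.h gives the new parameter a `kNoGCCallbackFlags` default so existing two-argument call sites compile unchanged:

```cpp
// Hypothetical internal call sites; the default argument on the third
// parameter is an assumption, not shown in this patch.
heap->CollectAllGarbage(Heap::kNoGCFlags, "embedder request");
heap->CollectAllGarbage(Heap::kNoGCFlags,
                        "embedder request",
                        v8::kGCCallbackFlagCompacted);  // any v8::GCCallbackFlags
                                                        // bit; illustration only
```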
| 684 | 686 |
| 685 | 687 |
| 686 void Heap::CollectAllAvailableGarbage(const char* gc_reason) { | 688 void Heap::CollectAllAvailableGarbage(const char* gc_reason) { |
| 687 // Since we are ignoring the return value, the exact choice of space does | 689 // Since we are ignoring the return value, the exact choice of space does |
| 688 // not matter, so long as we do not specify NEW_SPACE, which would not | 690 // not matter, so long as we do not specify NEW_SPACE, which would not |
| 689 // cause a full GC. | 691 // cause a full GC. |
| 690 // Major GC would invoke weak handle callbacks on weakly reachable | 692 // Major GC would invoke weak handle callbacks on weakly reachable |
| 691 // handles, but won't collect weakly reachable objects until next | 693 // handles, but won't collect weakly reachable objects until next |
| (...skipping 22 matching lines...) |
| 714 mark_compact_collector()->SetFlags(kNoGCFlags); | 716 mark_compact_collector()->SetFlags(kNoGCFlags); |
| 715 new_space_.Shrink(); | 717 new_space_.Shrink(); |
| 716 UncommitFromSpace(); | 718 UncommitFromSpace(); |
| 717 incremental_marking()->UncommitMarkingDeque(); | 719 incremental_marking()->UncommitMarkingDeque(); |
| 718 } | 720 } |
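
Note on the elided body above: the comment explains why one major GC is not enough here. Weak handle callbacks run during the collection and may drop the last strong references to further objects, which only become collectible on the *next* major GC. A sketch of the retry pattern this implies; the loop shape and bound are assumptions about the skipped lines, not the elided code verbatim:

```cpp
// Hypothetical sketch: repeat full GCs while CollectGarbage() reports that
// another collection is likely to free more (weak callbacks ran), with a
// small fixed bound to guarantee termination.
const int kMaxNumberOfAttempts = 7;  // assumed bound
for (int attempt = 0; attempt < kMaxNumberOfAttempts; attempt++) {
  if (!CollectGarbage(OLD_POINTER_SPACE, MARK_COMPACTOR, gc_reason, NULL)) {
    break;  // the last GC is unlikely to make more objects collectible
  }
}
```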
| 719 | 721 |
| 720 | 722 |
| 721 bool Heap::CollectGarbage(AllocationSpace space, | 723 bool Heap::CollectGarbage(AllocationSpace space, |
| 722 GarbageCollector collector, | 724 GarbageCollector collector, |
| 723 const char* gc_reason, | 725 const char* gc_reason, |
| 724 const char* collector_reason) { | 726 const char* collector_reason, |
| | 727 const v8::GCCallbackFlags gc_callback_flags) { |
| 725 // The VM is in the GC state until exiting this function. | 728 // The VM is in the GC state until exiting this function. |
| 726 VMState<GC> state(isolate_); | 729 VMState<GC> state(isolate_); |
| 727 | 730 |
| 728 #ifdef DEBUG | 731 #ifdef DEBUG |
| 729 // Reset the allocation timeout to the GC interval, but make sure to | 732 // Reset the allocation timeout to the GC interval, but make sure to |
| 730 // allow at least a few allocations after a collection. The reason | 733 // allow at least a few allocations after a collection. The reason |
| 731 // for this is that we have a lot of allocation sequences and we | 734 // for this is that we have a lot of allocation sequences and we |
| 732 // assume that a garbage collection will allow the subsequent | 735 // assume that a garbage collection will allow the subsequent |
| 733 // allocation attempts to go through. | 736 // allocation attempts to go through. |
| 734 allocation_timeout_ = Max(6, FLAG_gc_interval); | 737 allocation_timeout_ = Max(6, FLAG_gc_interval); |
| (...skipping 34 matching lines...) |
| 769 tracer.set_gc_count(gc_count_); | 772 tracer.set_gc_count(gc_count_); |
| 770 | 773 |
| 771 // Tell the tracer which collector we've selected. | 774 // Tell the tracer which collector we've selected. |
| 772 tracer.set_collector(collector); | 775 tracer.set_collector(collector); |
| 773 | 776 |
| 774 { | 777 { |
| 775 HistogramTimerScope histogram_timer_scope( | 778 HistogramTimerScope histogram_timer_scope( |
| 776 (collector == SCAVENGER) ? isolate_->counters()->gc_scavenger() | 779 (collector == SCAVENGER) ? isolate_->counters()->gc_scavenger() |
| 777 : isolate_->counters()->gc_compactor()); | 780 : isolate_->counters()->gc_compactor()); |
| 778 next_gc_likely_to_collect_more = | 781 next_gc_likely_to_collect_more = |
| 779 PerformGarbageCollection(collector, &tracer); | 782 PerformGarbageCollection(collector, &tracer, gc_callback_flags); |
| 780 } | 783 } |
| 781 | 784 |
| 782 GarbageCollectionEpilogue(); | 785 GarbageCollectionEpilogue(); |
| 783 } | 786 } |
| 784 | 787 |
| 785 // Start incremental marking for the next cycle. The heap snapshot | 788 // Start incremental marking for the next cycle. The heap snapshot |
| 786 // generator needs incremental marking to stay off after it aborted. | 789 // generator needs incremental marking to stay off after it aborted. |
| 787 if (!mark_compact_collector()->abort_incremental_marking() && | 790 if (!mark_compact_collector()->abort_incremental_marking() && |
| 788 incremental_marking()->IsStopped() && | 791 incremental_marking()->IsStopped() && |
| 789 incremental_marking()->WorthActivating() && | 792 incremental_marking()->WorthActivating() && |
| (...skipping 206 matching lines...) |
| 996 set_survival_rate_trend(DECREASING); | 999 set_survival_rate_trend(DECREASING); |
| 997 } else if (survival_rate_diff < -kYoungSurvivalRateAllowedDeviation) { | 1000 } else if (survival_rate_diff < -kYoungSurvivalRateAllowedDeviation) { |
| 998 set_survival_rate_trend(INCREASING); | 1001 set_survival_rate_trend(INCREASING); |
| 999 } else { | 1002 } else { |
| 1000 set_survival_rate_trend(STABLE); | 1003 set_survival_rate_trend(STABLE); |
| 1001 } | 1004 } |
| 1002 | 1005 |
| 1003 survival_rate_ = survival_rate; | 1006 survival_rate_ = survival_rate; |
| 1004 } | 1007 } |
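
Note on the trend classification above: `survival_rate_diff` is the previous rate minus the current one, in percentage points, so a large positive diff means survival is falling. A self-contained illustration of the same branching; the real `kYoungSurvivalRateAllowedDeviation` constant lives elsewhere in heap.h, and the value 15 below is assumed purely for the example:

```cpp
// Illustration only: mirrors the classification in the hunk above.
enum Trend { DECREASING, INCREASING, STABLE };

static Trend ClassifyTrend(double previous_rate, double current_rate) {
  const double kAllowedDeviation = 15;  // assumed value of the real constant
  double diff = previous_rate - current_rate;
  if (diff > kAllowedDeviation) return DECREASING;   // fewer objects surviving
  if (diff < -kAllowedDeviation) return INCREASING;  // more objects surviving
  return STABLE;
}
// ClassifyTrend(85, 60) == DECREASING; ClassifyTrend(70, 75) == STABLE.
```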
| 1005 | 1008 |
| 1006 bool Heap::PerformGarbageCollection(GarbageCollector collector, | 1009 bool Heap::PerformGarbageCollection( |
| 1007 GCTracer* tracer) { | 1010 GarbageCollector collector, |
| | 1011 GCTracer* tracer, |
| | 1012 const v8::GCCallbackFlags gc_callback_flags) { |
| 1008 bool next_gc_likely_to_collect_more = false; | 1013 bool next_gc_likely_to_collect_more = false; |
| 1009 | 1014 |
| 1010 if (collector != SCAVENGER) { | 1015 if (collector != SCAVENGER) { |
| 1011 PROFILE(isolate_, CodeMovingGCEvent()); | 1016 PROFILE(isolate_, CodeMovingGCEvent()); |
| 1012 } | 1017 } |
| 1013 | 1018 |
| 1014 #ifdef VERIFY_HEAP | 1019 #ifdef VERIFY_HEAP |
| 1015 if (FLAG_verify_heap) { | 1020 if (FLAG_verify_heap) { |
| 1016 VerifyStringTable(this); | 1021 VerifyStringTable(this); |
| 1017 } | 1022 } |
| (...skipping 107 matching lines...) |
| 1125 if (collector == MARK_COMPACTOR) { | 1130 if (collector == MARK_COMPACTOR) { |
| 1126 // Register the amount of external allocated memory. | 1131 // Register the amount of external allocated memory. |
| 1127 amount_of_external_allocated_memory_at_last_global_gc_ = | 1132 amount_of_external_allocated_memory_at_last_global_gc_ = |
| 1128 amount_of_external_allocated_memory_; | 1133 amount_of_external_allocated_memory_; |
| 1129 } | 1134 } |
| 1130 | 1135 |
| 1131 { | 1136 { |
| 1132 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); | 1137 GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL); |
| 1133 VMState<EXTERNAL> state(isolate_); | 1138 VMState<EXTERNAL> state(isolate_); |
| 1134 HandleScope handle_scope(isolate_); | 1139 HandleScope handle_scope(isolate_); |
| 1135 CallGCEpilogueCallbacks(gc_type); | 1140 CallGCEpilogueCallbacks(gc_type, gc_callback_flags); |
| 1136 } | 1141 } |
| 1137 | 1142 |
| 1138 #ifdef VERIFY_HEAP | 1143 #ifdef VERIFY_HEAP |
| 1139 if (FLAG_verify_heap) { | 1144 if (FLAG_verify_heap) { |
| 1140 VerifyStringTable(this); | 1145 VerifyStringTable(this); |
| 1141 } | 1146 } |
| 1142 #endif | 1147 #endif |
| 1143 | 1148 |
| 1144 return next_gc_likely_to_collect_more; | 1149 return next_gc_likely_to_collect_more; |
| 1145 } | 1150 } |
| 1146 | 1151 |
| 1147 | 1152 |
| 1148 void Heap::CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags) { | 1153 void Heap::CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags) { |
| 1149 for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) { | 1154 for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) { |
| 1150 if (gc_type & gc_prologue_callbacks_[i].gc_type) { | 1155 if (gc_type & gc_prologue_callbacks_[i].gc_type) { |
| 1151 if (!gc_prologue_callbacks_[i].pass_isolate_) { | 1156 if (!gc_prologue_callbacks_[i].pass_isolate_) { |
| 1152 v8::GCPrologueCallback callback = | 1157 v8::GCPrologueCallback callback = |
| 1153 reinterpret_cast<v8::GCPrologueCallback>( | 1158 reinterpret_cast<v8::GCPrologueCallback>( |
| 1154 gc_prologue_callbacks_[i].callback); | 1159 gc_prologue_callbacks_[i].callback); |
| 1155 callback(gc_type, flags); | 1160 callback(gc_type, flags); |
| 1156 } else { | 1161 } else { |
| 1157 v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(this->isolate()); | 1162 v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(this->isolate()); |
| 1158 gc_prologue_callbacks_[i].callback(isolate, gc_type, flags); | 1163 gc_prologue_callbacks_[i].callback(isolate, gc_type, flags); |
| 1159 } | 1164 } |
| 1160 } | 1165 } |
| 1161 } | 1166 } |
| 1162 } | 1167 } |
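
Note on `CallGCPrologueCallbacks`: the `pass_isolate_` bit distinguishes callbacks registered through the legacy global API (no isolate argument) from per-isolate registrations, and the `reinterpret_cast` narrows the stored pointer back to the legacy signature before calling it. A hedged sketch of the two embedder-side registration forms against the public v8.h of this era:

```cpp
#include <v8.h>

// Legacy form: no isolate parameter (stored with pass_isolate_ == false).
static void LegacyPrologue(v8::GCType type, v8::GCCallbackFlags flags) {}

// Isolate-taking form (stored with pass_isolate_ == true).
static void IsolatePrologue(v8::Isolate* isolate,
                            v8::GCType type,
                            v8::GCCallbackFlags flags) {}

void RegisterPrologues(v8::Isolate* isolate) {
  v8::V8::AddGCPrologueCallback(LegacyPrologue, v8::kGCTypeMarkSweepCompact);
  isolate->AddGCPrologueCallback(IsolatePrologue);
}
```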
| 1163 | 1168 |
| 1164 | 1169 |
| 1165 void Heap::CallGCEpilogueCallbacks(GCType gc_type) { | 1170 void Heap::CallGCEpilogueCallbacks(GCType gc_type, |
| | 1171 GCCallbackFlags gc_callback_flags) { |
| 1166 for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) { | 1172 for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) { |
| 1167 if (gc_type & gc_epilogue_callbacks_[i].gc_type) { | 1173 if (gc_type & gc_epilogue_callbacks_[i].gc_type) { |
| 1168 if (!gc_epilogue_callbacks_[i].pass_isolate_) { | 1174 if (!gc_epilogue_callbacks_[i].pass_isolate_) { |
| 1169 v8::GCPrologueCallback callback = | 1175 v8::GCPrologueCallback callback = |
| 1170 reinterpret_cast<v8::GCPrologueCallback>( | 1176 reinterpret_cast<v8::GCPrologueCallback>( |
| 1171 gc_epilogue_callbacks_[i].callback); | 1177 gc_epilogue_callbacks_[i].callback); |
| 1172 callback(gc_type, kNoGCCallbackFlags); | 1178 callback(gc_type, gc_callback_flags); |
| 1173 } else { | 1179 } else { |
| 1174 v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(this->isolate()); | 1180 v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(this->isolate()); |
| 1175 gc_epilogue_callbacks_[i].callback( | 1181 gc_epilogue_callbacks_[i].callback( |
| 1176 isolate, gc_type, kNoGCCallbackFlags); | 1182 isolate, gc_type, gc_callback_flags); |
| 1177 } | 1183 } |
| 1178 } | 1184 } |
| 1179 } | 1185 } |
| 1180 } | 1186 } |
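
Note on the hunk above: this is the observable effect of the patch. Epilogue callbacks previously always received the hard-coded `kNoGCCallbackFlags`; they now receive whatever flags the collection was started with. A hedged embedder-side sketch of a callback that can now act on those flags:

```cpp
#include <v8.h>

// With this change, 'flags' reflects the GCCallbackFlags passed to
// Heap::CollectAllGarbage() rather than always kNoGCCallbackFlags.
static void OnGCEpilogue(v8::Isolate* isolate,
                         v8::GCType type,
                         v8::GCCallbackFlags flags) {
  if (flags & v8::kGCCallbackFlagCompacted) {
    // A compacting full GC ran; e.g. drop caches keyed on object identity.
  }
}

// Registration (isolate-taking variant, pass_isolate_ == true):
//   isolate->AddGCEpilogueCallback(OnGCEpilogue);
```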
| 1181 | 1187 |
| 1182 | 1188 |
| 1183 void Heap::MarkCompact(GCTracer* tracer) { | 1189 void Heap::MarkCompact(GCTracer* tracer) { |
| 1184 gc_state_ = MARK_COMPACT; | 1190 gc_state_ = MARK_COMPACT; |
| 1185 LOG(isolate_, ResourceEvent("markcompact", "begin")); | 1191 LOG(isolate_, ResourceEvent("markcompact", "begin")); |
| 1186 | 1192 |
| (...skipping 6801 matching lines...) |
| 7988 static_cast<int>(object_sizes_last_time_[index])); | 7994 static_cast<int>(object_sizes_last_time_[index])); |
| 7989 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) | 7995 CODE_AGE_LIST_COMPLETE(ADJUST_LAST_TIME_OBJECT_COUNT) |
| 7990 #undef ADJUST_LAST_TIME_OBJECT_COUNT | 7996 #undef ADJUST_LAST_TIME_OBJECT_COUNT |
| 7991 | 7997 |
| 7992 OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); | 7998 OS::MemCopy(object_counts_last_time_, object_counts_, sizeof(object_counts_)); |
| 7993 OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); | 7999 OS::MemCopy(object_sizes_last_time_, object_sizes_, sizeof(object_sizes_)); |
| 7994 ClearObjectStats(); | 8000 ClearObjectStats(); |
| 7995 } | 8001 } |
| 7996 | 8002 |
| 7997 } } // namespace v8::internal | 8003 } } // namespace v8::internal |