OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 735 matching lines...)
746 return next_gc_likely_to_collect_more; | 746 return next_gc_likely_to_collect_more; |
747 } | 747 } |
748 | 748 |
749 | 749 |
750 int Heap::NotifyContextDisposed() { | 750 int Heap::NotifyContextDisposed() { |
751 if (FLAG_concurrent_recompilation) { | 751 if (FLAG_concurrent_recompilation) { |
752 // Flush the queued recompilation tasks. | 752 // Flush the queued recompilation tasks. |
753 isolate()->optimizing_compiler_thread()->Flush(); | 753 isolate()->optimizing_compiler_thread()->Flush(); |
754 } | 754 } |
755 flush_monomorphic_ics_ = true; | 755 flush_monomorphic_ics_ = true; |
| 756 AgeInlineCaches(); |
756 return ++contexts_disposed_; | 757 return ++contexts_disposed_; |
757 } | 758 } |
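
With this change, inline caches are aged as soon as the embedder reports a context disposal, instead of waiting for an idle notification with a large hint (see the old lines 5731-5736 further down). A minimal embedder-side sketch of the path that reaches Heap::NotifyContextDisposed(), assuming the public API of this vintage (v8::V8::ContextDisposedNotification() and v8::V8::IdleNotification()):

    // Hedged sketch, not part of this CL: the embedder drops a context and
    // tells V8. ContextDisposedNotification() funnels into
    // Heap::NotifyContextDisposed() above, which now also ages ICs.
    #include <v8.h>

    void DisposeAndNotify(v8::Persistent<v8::Context>& context) {
      context.Dispose();                      // release the embedder's handle
      v8::V8::ContextDisposedNotification();  // flush queues, age ICs
      // Hand V8 idle time so the garbage left behind can be collected;
      // IdleNotification() returns true once no further work is useful.
      while (!v8::V8::IdleNotification(1000)) {
      }
    }
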
758 | 759 |
759 | 760 |
760 void Heap::PerformScavenge() { | 761 void Heap::PerformScavenge() { |
761 GCTracer tracer(this, NULL, NULL); | 762 GCTracer tracer(this, NULL, NULL); |
762 if (incremental_marking()->IsStopped()) { | 763 if (incremental_marking()->IsStopped()) { |
763 PerformGarbageCollection(SCAVENGER, &tracer); | 764 PerformGarbageCollection(SCAVENGER, &tracer); |
764 } else { | 765 } else { |
765 PerformGarbageCollection(MARK_COMPACTOR, &tracer); | 766 PerformGarbageCollection(MARK_COMPACTOR, &tracer); |
(...skipping 377 matching lines...)
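
The PerformScavenge() hunk above encodes a simple rule. A hedged restatement, with an assumed free-function name for illustration: while incremental marking is in progress, a scavenge request is upgraded to a full mark-compact instead of scavenging alongside the marker.

    // Hedged sketch of the selection in Heap::PerformScavenge() above;
    // SelectCollector is a hypothetical name, not part of this CL.
    GarbageCollector SelectCollector(bool incremental_marking_stopped) {
      // A plain scavenge is taken only while no incremental marking is
      // underway; otherwise the request escalates to MARK_COMPACTOR.
      return incremental_marking_stopped ? SCAVENGER : MARK_COMPACTOR;
    }
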
1143 MarkCompactPrologue(); | 1144 MarkCompactPrologue(); |
1144 | 1145 |
1145 mark_compact_collector_.CollectGarbage(); | 1146 mark_compact_collector_.CollectGarbage(); |
1146 | 1147 |
1147 LOG(isolate_, ResourceEvent("markcompact", "end")); | 1148 LOG(isolate_, ResourceEvent("markcompact", "end")); |
1148 | 1149 |
1149 gc_state_ = NOT_IN_GC; | 1150 gc_state_ = NOT_IN_GC; |
1150 | 1151 |
1151 isolate_->counters()->objs_since_last_full()->Set(0); | 1152 isolate_->counters()->objs_since_last_full()->Set(0); |
1152 | 1153 |
1153 contexts_disposed_ = 0; | |
1154 | |
1155 flush_monomorphic_ics_ = false; | 1154 flush_monomorphic_ics_ = false; |
1156 } | 1155 } |
1157 | 1156 |
1158 | 1157 |
1159 void Heap::MarkCompactPrologue() { | 1158 void Heap::MarkCompactPrologue() { |
1160 // At any old GC clear the keyed lookup cache to enable collection of unused | 1159 // At any old GC clear the keyed lookup cache to enable collection of unused |
1161 // maps. | 1160 // maps. |
1162 isolate_->keyed_lookup_cache()->Clear(); | 1161 isolate_->keyed_lookup_cache()->Clear(); |
1163 isolate_->context_slot_cache()->Clear(); | 1162 isolate_->context_slot_cache()->Clear(); |
1164 isolate_->descriptor_lookup_cache()->Clear(); | 1163 isolate_->descriptor_lookup_cache()->Clear(); |
(...skipping 4556 matching lines...)
5721 // Minimal hint that allows a full GC. | 5720 // Minimal hint that allows a full GC. |
5722 const int kMinHintForFullGC = 100; | 5721 const int kMinHintForFullGC = 100; |
5723 intptr_t size_factor = Min(Max(hint, 20), kMaxHint) / 4; | 5722 intptr_t size_factor = Min(Max(hint, 20), kMaxHint) / 4; |
5724 // The size factor is in range [5..250]. The numbers here are chosen from | 5723 // The size factor is in range [5..250]. The numbers here are chosen from |
5725 // experiments. If you change them, make sure to test with | 5724 // experiments. If you change them, make sure to test with |
5726 // chrome/performance_ui_tests --gtest_filter="GeneralMixMemoryTest.* | 5725 // chrome/performance_ui_tests --gtest_filter="GeneralMixMemoryTest.* |
5727 intptr_t step_size = | 5726 intptr_t step_size = |
5728 size_factor * IncrementalMarking::kAllocatedThreshold; | 5727 size_factor * IncrementalMarking::kAllocatedThreshold; |
5729 | 5728 |
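
The "[5..250]" range follows directly from the clamping, assuming kMaxHint == 1000 as defined in the skipped lines earlier in Heap::IdleNotification(): a hint of 20 or less gives Min(Max(hint, 20), 1000) / 4 == 5, and a hint of 1000 or more gives 1000 / 4 == 250. A standalone check:

    // Hedged check of the "[5..250]" comment above; kMaxHint == 1000 is
    // an assumption taken from elsewhere in this function.
    #include <algorithm>
    #include <cassert>

    int SizeFactor(int hint) {
      const int kMaxHint = 1000;
      return std::min(std::max(hint, 20), kMaxHint) / 4;
    }

    void CheckBounds() {
      assert(SizeFactor(0) == 5);       // lower clamp: Max(0, 20) / 4
      assert(SizeFactor(20) == 5);
      assert(SizeFactor(1000) == 250);  // upper clamp: Min(..., 1000) / 4
      assert(SizeFactor(5000) == 250);
    }
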
5730 if (contexts_disposed_ > 0) { | 5729 if (contexts_disposed_ > 0) { |
5731 if (hint >= kMaxHint) { | 5730 contexts_disposed_ = 0; |
5732 // The embedder is requesting a lot of GC work after context disposal, | |
5733 // we age inline caches so that they don't keep objects from | |
5734 // the old context alive. | |
5735 AgeInlineCaches(); | |
5736 } | |
5737 int mark_sweep_time = Min(TimeMarkSweepWouldTakeInMs(), 1000); | 5731 int mark_sweep_time = Min(TimeMarkSweepWouldTakeInMs(), 1000); |
5738 if (hint >= mark_sweep_time && !FLAG_expose_gc && | 5732 if (hint >= mark_sweep_time && !FLAG_expose_gc && |
5739 incremental_marking()->IsStopped()) { | 5733 incremental_marking()->IsStopped()) { |
5740 HistogramTimerScope scope(isolate_->counters()->gc_context()); | 5734 HistogramTimerScope scope(isolate_->counters()->gc_context()); |
5741 CollectAllGarbage(kReduceMemoryFootprintMask, | 5735 CollectAllGarbage(kReduceMemoryFootprintMask, |
5742 "idle notification: contexts disposed"); | 5736 "idle notification: contexts disposed"); |
5743 } else { | 5737 } else { |
5744 AdvanceIdleIncrementalMarking(step_size); | 5738 AdvanceIdleIncrementalMarking(step_size); |
5745 contexts_disposed_ = 0; | |
5746 } | 5739 } |
| 5740 |
5747 // After context disposal there is likely a lot of garbage remaining, reset | 5741 // After context disposal there is likely a lot of garbage remaining, reset |
5748 // the idle notification counters in order to trigger more incremental GCs | 5742 // the idle notification counters in order to trigger more incremental GCs |
5749 // on subsequent idle notifications. | 5743 // on subsequent idle notifications. |
5750 StartIdleRound(); | 5744 StartIdleRound(); |
5751 return false; | 5745 return false; |
5752 } | 5746 } |
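
The contexts-disposed branch above now owns the counter reset (new line 5730) and chooses between a synchronous full GC and an incremental step. A hedged distillation of that choice, with hypothetical names for the inputs:

    // Hedged sketch of the branch above; all names are illustrative.
    // Returns true when the embedder's idle hint (ms) should be spent on a
    // blocking full collection rather than an incremental marking step.
    bool ShouldDoFullGCAfterContextDisposal(int hint_ms,
                                            int mark_sweep_estimate_ms,
                                            bool gc_exposed_to_js,
                                            bool incremental_marking_stopped) {
      // Cap the estimate at one second, as the code above does.
      int budget_ms =
          mark_sweep_estimate_ms < 1000 ? mark_sweep_estimate_ms : 1000;
      // Only a hint covering the whole mark-sweep, with no script-visible
      // gc() and no marking in flight, justifies blocking for a full GC.
      return hint_ms >= budget_ms && !gc_exposed_to_js &&
             incremental_marking_stopped;
    }

Either way the branch returns false and restarts the idle round, since context disposal usually leaves enough garbage to justify more incremental GCs on later notifications.
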
5753 | 5747 |
5754 if (!FLAG_incremental_marking || FLAG_expose_gc || Serializer::enabled()) { | 5748 if (!FLAG_incremental_marking || FLAG_expose_gc || Serializer::enabled()) { |
5755 return IdleGlobalGC(); | 5749 return IdleGlobalGC(); |
5756 } | 5750 } |
(...skipping 2148 matching lines...)
7905 if (FLAG_concurrent_recompilation) { | 7899 if (FLAG_concurrent_recompilation) { |
7906 heap_->relocation_mutex_->Lock(); | 7900 heap_->relocation_mutex_->Lock(); |
7907 #ifdef DEBUG | 7901 #ifdef DEBUG |
7908 heap_->relocation_mutex_locked_by_optimizer_thread_ = | 7902 heap_->relocation_mutex_locked_by_optimizer_thread_ = |
7909 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); | 7903 heap_->isolate()->optimizing_compiler_thread()->IsOptimizerThread(); |
7910 #endif // DEBUG | 7904 #endif // DEBUG |
7911 } | 7905 } |
7912 } | 7906 } |
7913 | 7907 |
7914 } } // namespace v8::internal | 7908 } } // namespace v8::internal |
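
The final hunk is the locking half of what appears to be Heap::RelocationLock's constructor: the relocation mutex is taken only when concurrent recompilation is enabled, and under DEBUG it records whether the optimizer thread took it. A hedged sketch of the full RAII shape this implies; the destructor is an assumption, outside the visible diff:

    // Hedged sketch of the RAII guard the hunk above belongs to; the
    // unlocking half is assumed, not shown in this diff.
    class RelocationLockSketch {
     public:
      explicit RelocationLockSketch(Heap* heap) : heap_(heap) {
        if (FLAG_concurrent_recompilation) {
          heap_->relocation_mutex_->Lock();  // block relocation while held
        }
      }
      ~RelocationLockSketch() {
        if (FLAG_concurrent_recompilation) {
          heap_->relocation_mutex_->Unlock();  // every exit path unlocks
        }
      }
     private:
      Heap* heap_;
    };
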