Chromium Code Reviews

Unified Diff: src/heap/heap.cc

Issue 1208993009: Reland "Replace reduce-memory mode in idle notification with delayed clean-up GC." (Closed)
Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 5 months ago
 // Copyright 2012 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/v8.h"

 #include "src/accessors.h"
 #include "src/api.h"
 #include "src/base/bits.h"
 #include "src/base/once.h"
 #include "src/base/utils/random-number-generator.h"
 #include "src/bootstrapper.h"
 #include "src/codegen.h"
 #include "src/compilation-cache.h"
 #include "src/conversions.h"
 #include "src/cpu-profiler.h"
 #include "src/debug.h"
 #include "src/deoptimizer.h"
 #include "src/global-handles.h"
 #include "src/heap/gc-idle-time-handler.h"
 #include "src/heap/incremental-marking.h"
 #include "src/heap/mark-compact.h"
+#include "src/heap/memory-reducer.h"
 #include "src/heap/objects-visiting-inl.h"
 #include "src/heap/objects-visiting.h"
 #include "src/heap/store-buffer.h"
 #include "src/heap-profiler.h"
 #include "src/runtime-profiler.h"
 #include "src/scopeinfo.h"
 #include "src/snapshot/natives.h"
 #include "src/snapshot/serialize.h"
 #include "src/snapshot/snapshot.h"
 #include "src/utils.h"
(...skipping 67 matching lines...)
       raw_allocations_hash_(0),
       dump_allocations_hash_countdown_(FLAG_dump_allocations_digest_at_alloc),
       ms_count_(0),
       gc_count_(0),
       remembered_unmapped_pages_index_(0),
       unflattened_strings_length_(0),
 #ifdef DEBUG
       allocation_timeout_(0),
 #endif  // DEBUG
       old_generation_allocation_limit_(initial_old_generation_size_),
-      idle_old_generation_allocation_limit_(
-          kMinimumOldGenerationAllocationLimit),
       old_gen_exhausted_(false),
       inline_allocation_disabled_(false),
       store_buffer_rebuilder_(store_buffer()),
       hidden_string_(NULL),
       gc_safe_size_of_old_object_(NULL),
       total_regexp_code_generated_(0),
       tracer_(this),
       new_space_high_promotion_mode_active_(false),
       gathering_lifetime_feedback_(0),
       high_survival_rate_period_length_(0),
(...skipping 15 matching lines...)
       max_alive_after_gc_(0),
       min_in_mutator_(kMaxInt),
       marking_time_(0.0),
       sweeping_time_(0.0),
       last_idle_notification_time_(0.0),
       last_gc_time_(0.0),
       mark_compact_collector_(this),
       store_buffer_(this),
       marking_(this),
       incremental_marking_(this),
+      memory_reducer_(this),
       full_codegen_bytes_generated_(0),
       crankshaft_codegen_bytes_generated_(0),
       new_space_allocation_counter_(0),
       old_generation_allocation_counter_(0),
       old_generation_size_at_last_gc_(0),
       gcs_since_last_deopt_(0),
       allocation_sites_scratchpad_length_(0),
       ring_buffer_full_(false),
       ring_buffer_end_(0),
       promotion_queue_(this),
(...skipping 763 matching lines...)
         !mark_compact_collector_.marking_deque_.IsEmpty() && !FLAG_gc_global) {
       if (FLAG_trace_incremental_marking) {
         PrintF("[IncrementalMarking] Delaying MarkSweep.\n");
       }
       collector = SCAVENGER;
       collector_reason = "incremental marking delaying mark-sweep";
     }
   }

   bool next_gc_likely_to_collect_more = false;
+  intptr_t committed_memory_before = 0;
+
+  if (collector == MARK_COMPACTOR) {
+    committed_memory_before = CommittedOldGenerationMemory();
+  }

   {
     tracer()->Start(collector, gc_reason, collector_reason);
     DCHECK(AllowHeapAllocation::IsAllowed());
     DisallowHeapAllocation no_allocation_during_gc;
     GarbageCollectionPrologue();

     {
       HistogramTimerScope histogram_timer_scope(
           (collector == SCAVENGER) ? isolate_->counters()->gc_scavenger()
                                    : isolate_->counters()->gc_compactor());
       next_gc_likely_to_collect_more =
           PerformGarbageCollection(collector, gc_callback_flags);
     }

     GarbageCollectionEpilogue();
     if (collector == MARK_COMPACTOR && FLAG_track_detached_contexts) {
       isolate()->CheckDetachedContextsAfterGC();
     }

     if (collector == MARK_COMPACTOR) {
-      gc_idle_time_handler_.NotifyMarkCompact(next_gc_likely_to_collect_more);
-    } else {
-      gc_idle_time_handler_.NotifyScavenge();
+      intptr_t committed_memory_after = CommittedOldGenerationMemory();
+      intptr_t used_memory_after = PromotedSpaceSizeOfObjects();
+      MemoryReducer::Event event;
+      event.type = MemoryReducer::kMarkCompact;
+      event.time_ms = MonotonicallyIncreasingTimeInMs();
+      // Trigger one more GC if
+      // - this GC decreased committed memory,
+      // - there is high fragmentation,
+      // - there are live detached contexts.
+      event.next_gc_likely_to_collect_more =
+          (committed_memory_before - committed_memory_after) > MB ||
+          HasHighFragmentation(used_memory_after, committed_memory_after) ||
+          (detached_contexts()->length() > 0);
+      memory_reducer_.NotifyMarkCompact(event);
     }

     tracer()->Stop(collector);
   }

   if (collector == MARK_COMPACTOR &&
       (gc_callback_flags & kGCCallbackFlagForced) != 0) {
     isolate()->CountUsage(v8::Isolate::kForcedGC);
   }

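Reviewer note on the MARK_COMPACTOR hunk above: the expression assigned to event.next_gc_likely_to_collect_more bundles three independent signals. The standalone restatement below is only illustrative; the helper name, the simplified bool/int parameters, and the main() driver are not part of the patch.

#include <cstdint>
#include <cstdio>

constexpr intptr_t MB = 1024 * 1024;  // same value as V8's MB constant

// Simplified restatement of the condition in the hunk above: one more
// clean-up GC looks worthwhile if the last mark-compact released committed
// memory, the old generation is badly fragmented, or detached contexts are
// still alive and may become collectable.
bool NextGCLikelyToCollectMore(intptr_t committed_before,
                               intptr_t committed_after,
                               bool has_high_fragmentation,
                               int detached_context_count) {
  return (committed_before - committed_after) > MB ||
         has_high_fragmentation ||
         detached_context_count > 0;
}

int main() {
  // A mark-compact that shrank committed memory from 320 MB to 300 MB.
  printf("%d\n", NextGCLikelyToCollectMore(320 * MB, 300 * MB, false, 0));  // 1
  // Nothing released, no fragmentation, no detached contexts.
  printf("%d\n", NextGCLikelyToCollectMore(300 * MB, 300 * MB, false, 0));  // 0
  return 0;
}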
(...skipping 14 matching lines...)
     tracer()->ResetSurvivalEvents();
     old_generation_size_configured_ = false;
   }
   if (isolate()->concurrent_recompilation_enabled()) {
     // Flush the queued recompilation tasks.
     isolate()->optimizing_compile_dispatcher()->Flush();
   }
   AgeInlineCaches();
   set_retained_maps(ArrayList::cast(empty_fixed_array()));
   tracer()->AddContextDisposalTime(base::OS::TimeCurrentMillis());
+  MemoryReducer::Event event;
+  event.type = MemoryReducer::kContextDisposed;
+  event.time_ms = MonotonicallyIncreasingTimeInMs();
+  memory_reducer_.NotifyContextDisposed(event);
   return ++contexts_disposed_;
 }


+void Heap::StartIdleIncrementalMarking() {
+  gc_idle_time_handler_.ResetNoProgressCounter();
+  incremental_marking()->Start(kReduceMemoryFootprintMask);
+}
+
+
 void Heap::MoveElements(FixedArray* array, int dst_index, int src_index,
                         int len) {
   if (len == 0) return;

   DCHECK(array->map() != fixed_cow_array_map());
   Object** dst_objects = array->data_start() + dst_index;
   MemMove(dst_objects, array->data_start() + src_index, len * kPointerSize);
   if (!InNewSpace(array)) {
     for (int i = 0; i < len; i++) {
       // TODO(hpayer): check store buffer for entries
(...skipping 3754 matching lines...)
   return mutator_utilization > high_mutator_utilization;
 }


 bool Heap::HasLowAllocationRate() {
   return HasLowYoungGenerationAllocationRate() &&
          HasLowOldGenerationAllocationRate();
 }


+bool Heap::HasHighFragmentation() {
+  intptr_t used = PromotedSpaceSizeOfObjects();
+  intptr_t committed = CommittedOldGenerationMemory();
+  return HasHighFragmentation(used, committed);
+}
+
+
+bool Heap::HasHighFragmentation(intptr_t used, intptr_t committed) {
+  const intptr_t kSlack = 16 * MB;
+  // Fragmentation is high if committed > 2 * used + kSlack.
+  // Rewrite the expression to avoid overflow.
+  return committed - used > used + kSlack;
+}
+
+
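Aside on Heap::HasHighFragmentation above: per the patch comment, the check is rewritten so that 2 * used never has to be computed, which is what could overflow intptr_t on 32-bit targets for very large heaps. A standalone check of the arithmetic follows; the main() driver and sample numbers are illustrative only, while the helper mirrors the check added in the patch.

#include <cstdint>
#include <cstdio>

constexpr intptr_t MB = 1024 * 1024;
constexpr intptr_t kSlack = 16 * MB;  // same slack as in the patch

// "committed > 2 * used + kSlack", written as
// "committed - used > used + kSlack" so that 2 * used is never computed.
bool HasHighFragmentation(intptr_t used, intptr_t committed) {
  return committed - used > used + kSlack;
}

int main() {
  // 100 MB of live objects in 300 MB of committed old-generation memory:
  // 300 > 2 * 100 + 16, so fragmentation counts as high.
  printf("%d\n", HasHighFragmentation(100 * MB, 300 * MB));  // 1
  // 100 MB live in 180 MB committed: 180 <= 216, not high.
  printf("%d\n", HasHighFragmentation(100 * MB, 180 * MB));  // 0
  return 0;
}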
 void Heap::ReduceNewSpaceSize() {
   if (!FLAG_predictable && HasLowAllocationRate()) {
     new_space_.Shrink();
     UncommitFromSpace();
   }
 }


 bool Heap::TryFinalizeIdleIncrementalMarking(
     double idle_time_in_ms, size_t size_of_objects,
     size_t final_incremental_mark_compact_speed_in_bytes_per_ms) {
   if (FLAG_overapproximate_weak_closure &&
       (incremental_marking()->IsReadyToOverApproximateWeakClosure() ||
        (!incremental_marking()->weak_closure_was_overapproximated() &&
         mark_compact_collector_.marking_deque()->IsEmpty() &&
         gc_idle_time_handler_.ShouldDoOverApproximateWeakClosure(
             static_cast<size_t>(idle_time_in_ms))))) {
     OverApproximateWeakClosure(
         "Idle notification: overapproximate weak closure");
     return true;
   } else if (incremental_marking()->IsComplete() ||
              (mark_compact_collector_.marking_deque()->IsEmpty() &&
               gc_idle_time_handler_.ShouldDoFinalIncrementalMarkCompact(
                   static_cast<size_t>(idle_time_in_ms), size_of_objects,
                   final_incremental_mark_compact_speed_in_bytes_per_ms))) {
     CollectAllGarbage(kNoGCFlags, "idle notification: finalize incremental");
-    gc_idle_time_handler_.NotifyIdleMarkCompact();
     return true;
   }
   return false;
 }

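For readers of TryFinalizeIdleIncrementalMarking above, the nested condition boils down to a three-way choice. A simplified, self-contained restatement is sketched below; the enum, the bool parameters, and the function name are illustrative only and not V8 API.

enum class IdleFinalizeStep {
  kOverApproximateWeakClosure,  // cheap pre-finalization step
  kFinalizeWithFullGC,          // "idle notification: finalize incremental"
  kNone                         // keep marking; not enough idle time yet
};

// Mirrors the structure of the two branches in the function above; each bool
// stands in for one of the predicates queried there.
IdleFinalizeStep ChooseIdleFinalizeStep(
    bool overapproximate_flag, bool ready_to_overapproximate,
    bool already_overapproximated, bool marking_deque_empty,
    bool enough_idle_time_for_overapprox, bool marking_complete,
    bool enough_idle_time_for_final_gc) {
  if (overapproximate_flag &&
      (ready_to_overapproximate ||
       (!already_overapproximated && marking_deque_empty &&
        enough_idle_time_for_overapprox))) {
    return IdleFinalizeStep::kOverApproximateWeakClosure;
  }
  if (marking_complete ||
      (marking_deque_empty && enough_idle_time_for_final_gc)) {
    return IdleFinalizeStep::kFinalizeWithFullGC;
  }
  return IdleFinalizeStep::kNone;
}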
 GCIdleTimeHandler::HeapState Heap::ComputeHeapState() {
   GCIdleTimeHandler::HeapState heap_state;
   heap_state.contexts_disposed = contexts_disposed_;
   heap_state.contexts_disposal_rate =
(...skipping 10 matching lines...)
           tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
   heap_state.final_incremental_mark_compact_speed_in_bytes_per_ms =
       static_cast<size_t>(
           tracer()->FinalIncrementalMarkCompactSpeedInBytesPerMillisecond());
   heap_state.scavenge_speed_in_bytes_per_ms =
       static_cast<size_t>(tracer()->ScavengeSpeedInBytesPerMillisecond());
   heap_state.used_new_space_size = new_space_.Size();
   heap_state.new_space_capacity = new_space_.Capacity();
   heap_state.new_space_allocation_throughput_in_bytes_per_ms =
       tracer()->NewSpaceAllocationThroughputInBytesPerMillisecond();
-  heap_state.has_low_allocation_rate = HasLowAllocationRate();
-  intptr_t limit = old_generation_allocation_limit_;
-  if (heap_state.has_low_allocation_rate) {
-    limit = idle_old_generation_allocation_limit_;
-  }
-  heap_state.can_start_incremental_marking =
-      incremental_marking()->CanBeActivated() &&
-      HeapIsFullEnoughToStartIncrementalMarking(limit) &&
-      !mark_compact_collector()->sweeping_in_progress();
   return heap_state;
 }


 bool Heap::PerformIdleTimeAction(GCIdleTimeAction action,
                                  GCIdleTimeHandler::HeapState heap_state,
                                  double deadline_in_ms) {
   bool result = false;
   switch (action.type) {
     case DONE:
       result = true;
       break;
     case DO_INCREMENTAL_MARKING: {
-      if (incremental_marking()->IsStopped()) {
-        incremental_marking()->Start(
-            action.reduce_memory ? kReduceMemoryFootprintMask : kNoGCFlags);
-      }
+      DCHECK(!incremental_marking()->IsStopped());
       double remaining_idle_time_in_ms = 0.0;
       do {
         incremental_marking()->Step(
             action.parameter, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
             IncrementalMarking::FORCE_MARKING,
             IncrementalMarking::DO_NOT_FORCE_COMPLETION);
         remaining_idle_time_in_ms =
             deadline_in_ms - MonotonicallyIncreasingTimeInMs();
       } while (remaining_idle_time_in_ms >=
                2.0 * GCIdleTimeHandler::kIncrementalMarkingStepTimeInMs &&
                !incremental_marking()->IsComplete() &&
                !mark_compact_collector_.marking_deque()->IsEmpty());
       if (remaining_idle_time_in_ms > 0.0) {
         action.additional_work = TryFinalizeIdleIncrementalMarking(
             remaining_idle_time_in_ms, heap_state.size_of_objects,
             heap_state.final_incremental_mark_compact_speed_in_bytes_per_ms);
       }
       break;
     }
     case DO_FULL_GC: {
-      if (action.reduce_memory) {
-        isolate_->compilation_cache()->Clear();
-      }
-      if (contexts_disposed_) {
-        HistogramTimerScope scope(isolate_->counters()->gc_context());
-        CollectAllGarbage(kNoGCFlags, "idle notification: contexts disposed");
-      } else {
-        CollectAllGarbage(kReduceMemoryFootprintMask,
-                          "idle notification: finalize idle round");
-      }
-      gc_idle_time_handler_.NotifyIdleMarkCompact();
+      DCHECK(contexts_disposed_ > 0);
+      HistogramTimerScope scope(isolate_->counters()->gc_context());
+      CollectAllGarbage(kNoGCFlags, "idle notification: contexts disposed");
       break;
     }
     case DO_SCAVENGE:
       CollectGarbage(NEW_SPACE, "idle notification: scavenge");
       break;
     case DO_FINALIZE_SWEEPING:
       mark_compact_collector()->EnsureSweepingCompleted();
       break;
     case DO_NOTHING:
       break;
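The DO_INCREMENTAL_MARKING case above uses a common deadline-driven pattern: always perform at least one marking step, then keep stepping while work remains and roughly two typical step durations still fit before the deadline. A generic, standalone sketch of that pattern follows; all names below are illustrative, and only MonotonicallyIncreasingTimeInMs is taken (by name) from the diff.

#include <chrono>
#include <cstdio>

// Monotonic clock in milliseconds, standing in for V8's
// MonotonicallyIncreasingTimeInMs(); this implementation is illustrative.
static double NowMs() {
  using namespace std::chrono;
  return duration<double, std::milli>(steady_clock::now().time_since_epoch())
      .count();
}

// Mirrors the do/while structure of the DO_INCREMENTAL_MARKING case: do one
// step, recompute the remaining idle time, and continue while work remains
// and at least two typical step durations still fit before the deadline.
template <typename StepFn, typename DoneFn>
void StepUntilDeadline(StepFn do_step, DoneFn is_done, double deadline_ms,
                       double typical_step_ms) {
  double remaining_ms = 0.0;
  do {
    do_step();
    remaining_ms = deadline_ms - NowMs();
  } while (remaining_ms >= 2.0 * typical_step_ms && !is_done());
}

int main() {
  int work_left = 1000;
  StepUntilDeadline([&] { --work_left; },            // one "marking step"
                    [&] { return work_left == 0; },  // all work retired?
                    NowMs() + 5.0 /* ms */, 0.01 /* ms per step */);
  printf("work left: %d\n", work_left);
  return 0;
}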
(...skipping 715 matching lines...)
   if (max_old_generation_size_ <= kMaxOldSpaceSizeMediumMemoryDevice ||
       FLAG_optimize_for_size) {
     factor = Min(factor, kMaxHeapGrowingFactorMemoryConstrained);
   }

   if (FLAG_stress_compaction ||
       mark_compact_collector()->reduce_memory_footprint_) {
     factor = kMinHeapGrowingFactor;
   }

-  // TODO(hpayer): Investigate if idle_old_generation_allocation_limit_ is still
-  // needed after taking the allocation rate for the old generation limit into
-  // account.
-  double idle_factor = Min(factor, kMaxHeapGrowingFactorIdle);
-
   old_generation_allocation_limit_ =
       CalculateOldGenerationAllocationLimit(factor, old_gen_size);
-  idle_old_generation_allocation_limit_ =
-      CalculateOldGenerationAllocationLimit(idle_factor, old_gen_size);

   if (FLAG_trace_gc_verbose) {
-    PrintIsolate(
-        isolate_,
-        "Grow: old size: %" V8_PTR_PREFIX "d KB, new limit: %" V8_PTR_PREFIX
-        "d KB (%.1f), new idle limit: %" V8_PTR_PREFIX "d KB (%.1f)\n",
-        old_gen_size / KB, old_generation_allocation_limit_ / KB, factor,
-        idle_old_generation_allocation_limit_ / KB, idle_factor);
+    PrintIsolate(isolate_, "Grow: old size: %" V8_PTR_PREFIX
+                 "d KB, new limit: %" V8_PTR_PREFIX "d KB (%.1f)\n",
+                 old_gen_size / KB, old_generation_allocation_limit_ / KB,
+                 factor);
   }
 }


 void Heap::DampenOldGenerationAllocationLimit(intptr_t old_gen_size,
                                               double gc_speed,
                                               double mutator_speed) {
   double factor = HeapGrowingFactor(gc_speed, mutator_speed);
   intptr_t limit = CalculateOldGenerationAllocationLimit(factor, old_gen_size);
   if (limit < old_generation_allocation_limit_) {
(...skipping 1224 matching lines...)
     *object_type = "CODE_TYPE";           \
     *object_sub_type = "CODE_AGE/" #name; \
     return true;
     CODE_AGE_LIST_COMPLETE(COMPARE_AND_RETURN_NAME)
 #undef COMPARE_AND_RETURN_NAME
   }
   return false;
 }
 }  // namespace internal
 }  // namespace v8
