Chromium Code Reviews
Side by Side Diff: src/heap/heap.cc

Issue 1218863002: Replace reduce-memory mode in idle notification with delayed clean-up GC. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Allow to start GC if fragmentation is high even (created 5 years, 5 months ago)
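
For orientation before the diff: the idle notification path no longer drives memory reduction itself. Instead, the heap reports every mark-compact, scavenge, and context disposal to a new delayed clean-up GC component, which can schedule a memory-reducing collection later, off the critical path. The sketch below is reconstructed only from the call sites visible in this diff; the real declarations live in the newly added src/heap/cleanup-gc.h (not part of this file), so the exact shape and signatures here are assumptions.

// Assumed shape of the CleanupGC interface, derived from the calls in the diff below.
class CleanupGC {
 public:
  enum EventType { kMarkCompact, kScavenge, kContextDisposed };

  struct Event {
    EventType type;
    double time_ms;  // Heap::MonotonicallyIncreasingTimeInMs() at the event.
    // Mark-compact only: true if the GC shrank committed memory by more than
    // 1 MB, fragmentation is still high, or detached contexts remain alive.
    bool next_gc_likely_to_collect_more;
  };

  void NotifyMarkCompact(const Event& event);
  void NotifyScavenge(const Event& event);
  void NotifyContextDisposed(const Event& event);
};

The corresponding gc_idle_time_handler_ notifications and the idle-only old generation allocation limit are removed further down in this diff.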
OLD | NEW
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/v8.h" 5 #include "src/v8.h"
6 6
7 #include "src/accessors.h" 7 #include "src/accessors.h"
8 #include "src/api.h" 8 #include "src/api.h"
9 #include "src/base/bits.h" 9 #include "src/base/bits.h"
10 #include "src/base/once.h" 10 #include "src/base/once.h"
11 #include "src/base/utils/random-number-generator.h" 11 #include "src/base/utils/random-number-generator.h"
12 #include "src/bootstrapper.h" 12 #include "src/bootstrapper.h"
13 #include "src/codegen.h" 13 #include "src/codegen.h"
14 #include "src/compilation-cache.h" 14 #include "src/compilation-cache.h"
15 #include "src/conversions.h" 15 #include "src/conversions.h"
16 #include "src/cpu-profiler.h" 16 #include "src/cpu-profiler.h"
17 #include "src/debug.h" 17 #include "src/debug.h"
18 #include "src/deoptimizer.h" 18 #include "src/deoptimizer.h"
19 #include "src/global-handles.h" 19 #include "src/global-handles.h"
20 #include "src/heap/cleanup-gc.h"
20 #include "src/heap/gc-idle-time-handler.h" 21 #include "src/heap/gc-idle-time-handler.h"
21 #include "src/heap/incremental-marking.h" 22 #include "src/heap/incremental-marking.h"
22 #include "src/heap/mark-compact.h" 23 #include "src/heap/mark-compact.h"
23 #include "src/heap/objects-visiting-inl.h" 24 #include "src/heap/objects-visiting-inl.h"
24 #include "src/heap/objects-visiting.h" 25 #include "src/heap/objects-visiting.h"
25 #include "src/heap/store-buffer.h" 26 #include "src/heap/store-buffer.h"
26 #include "src/heap-profiler.h" 27 #include "src/heap-profiler.h"
27 #include "src/runtime-profiler.h" 28 #include "src/runtime-profiler.h"
28 #include "src/scopeinfo.h" 29 #include "src/scopeinfo.h"
29 #include "src/snapshot/natives.h" 30 #include "src/snapshot/natives.h"
(...skipping 70 matching lines...)
100 raw_allocations_hash_(0), 101 raw_allocations_hash_(0),
101 dump_allocations_hash_countdown_(FLAG_dump_allocations_digest_at_alloc), 102 dump_allocations_hash_countdown_(FLAG_dump_allocations_digest_at_alloc),
102 ms_count_(0), 103 ms_count_(0),
103 gc_count_(0), 104 gc_count_(0),
104 remembered_unmapped_pages_index_(0), 105 remembered_unmapped_pages_index_(0),
105 unflattened_strings_length_(0), 106 unflattened_strings_length_(0),
106 #ifdef DEBUG 107 #ifdef DEBUG
107 allocation_timeout_(0), 108 allocation_timeout_(0),
108 #endif // DEBUG 109 #endif // DEBUG
109 old_generation_allocation_limit_(initial_old_generation_size_), 110 old_generation_allocation_limit_(initial_old_generation_size_),
110 idle_old_generation_allocation_limit_(
111 kMinimumOldGenerationAllocationLimit),
112 old_gen_exhausted_(false), 111 old_gen_exhausted_(false),
113 inline_allocation_disabled_(false), 112 inline_allocation_disabled_(false),
114 store_buffer_rebuilder_(store_buffer()), 113 store_buffer_rebuilder_(store_buffer()),
115 hidden_string_(NULL), 114 hidden_string_(NULL),
116 gc_safe_size_of_old_object_(NULL), 115 gc_safe_size_of_old_object_(NULL),
117 total_regexp_code_generated_(0), 116 total_regexp_code_generated_(0),
118 tracer_(this), 117 tracer_(this),
119 new_space_high_promotion_mode_active_(false), 118 new_space_high_promotion_mode_active_(false),
120 high_survival_rate_period_length_(0), 119 high_survival_rate_period_length_(0),
121 promoted_objects_size_(0), 120 promoted_objects_size_(0),
(...skipping 14 matching lines...)
136 max_alive_after_gc_(0), 135 max_alive_after_gc_(0),
137 min_in_mutator_(kMaxInt), 136 min_in_mutator_(kMaxInt),
138 marking_time_(0.0), 137 marking_time_(0.0),
139 sweeping_time_(0.0), 138 sweeping_time_(0.0),
140 last_idle_notification_time_(0.0), 139 last_idle_notification_time_(0.0),
141 last_gc_time_(0.0), 140 last_gc_time_(0.0),
142 mark_compact_collector_(this), 141 mark_compact_collector_(this),
143 store_buffer_(this), 142 store_buffer_(this),
144 marking_(this), 143 marking_(this),
145 incremental_marking_(this), 144 incremental_marking_(this),
145 cleanup_gc_(this),
146 full_codegen_bytes_generated_(0), 146 full_codegen_bytes_generated_(0),
147 crankshaft_codegen_bytes_generated_(0), 147 crankshaft_codegen_bytes_generated_(0),
148 new_space_allocation_counter_(0), 148 new_space_allocation_counter_(0),
149 old_generation_allocation_counter_(0), 149 old_generation_allocation_counter_(0),
150 old_generation_size_at_last_gc_(0), 150 old_generation_size_at_last_gc_(0),
151 gcs_since_last_deopt_(0), 151 gcs_since_last_deopt_(0),
152 allocation_sites_scratchpad_length_(0), 152 allocation_sites_scratchpad_length_(0),
153 ring_buffer_full_(false), 153 ring_buffer_full_(false),
154 ring_buffer_end_(0), 154 ring_buffer_end_(0),
155 promotion_queue_(this), 155 promotion_queue_(this),
(...skipping 763 matching lines...)
919 !mark_compact_collector_.marking_deque_.IsEmpty() && !FLAG_gc_global) { 919 !mark_compact_collector_.marking_deque_.IsEmpty() && !FLAG_gc_global) {
920 if (FLAG_trace_incremental_marking) { 920 if (FLAG_trace_incremental_marking) {
921 PrintF("[IncrementalMarking] Delaying MarkSweep.\n"); 921 PrintF("[IncrementalMarking] Delaying MarkSweep.\n");
922 } 922 }
923 collector = SCAVENGER; 923 collector = SCAVENGER;
924 collector_reason = "incremental marking delaying mark-sweep"; 924 collector_reason = "incremental marking delaying mark-sweep";
925 } 925 }
926 } 926 }
927 927
928 bool next_gc_likely_to_collect_more = false; 928 bool next_gc_likely_to_collect_more = false;
929 intptr_t committed_memory_before;
930
931 if (collector == MARK_COMPACTOR) {
932 committed_memory_before = CommittedOldGenerationMemory();
933 }
929 934
930 { 935 {
931 tracer()->Start(collector, gc_reason, collector_reason); 936 tracer()->Start(collector, gc_reason, collector_reason);
932 DCHECK(AllowHeapAllocation::IsAllowed()); 937 DCHECK(AllowHeapAllocation::IsAllowed());
933 DisallowHeapAllocation no_allocation_during_gc; 938 DisallowHeapAllocation no_allocation_during_gc;
934 GarbageCollectionPrologue(); 939 GarbageCollectionPrologue();
935 940
936 { 941 {
937 HistogramTimerScope histogram_timer_scope( 942 HistogramTimerScope histogram_timer_scope(
938 (collector == SCAVENGER) ? isolate_->counters()->gc_scavenger() 943 (collector == SCAVENGER) ? isolate_->counters()->gc_scavenger()
939 : isolate_->counters()->gc_compactor()); 944 : isolate_->counters()->gc_compactor());
940 next_gc_likely_to_collect_more = 945 next_gc_likely_to_collect_more =
941 PerformGarbageCollection(collector, gc_callback_flags); 946 PerformGarbageCollection(collector, gc_callback_flags);
942 } 947 }
943 948
944 GarbageCollectionEpilogue(); 949 GarbageCollectionEpilogue();
945 if (collector == MARK_COMPACTOR && FLAG_track_detached_contexts) { 950 if (collector == MARK_COMPACTOR && FLAG_track_detached_contexts) {
946 isolate()->CheckDetachedContextsAfterGC(); 951 isolate()->CheckDetachedContextsAfterGC();
947 } 952 }
948 953
949 if (collector == MARK_COMPACTOR) { 954 if (collector == MARK_COMPACTOR) {
950 gc_idle_time_handler_.NotifyMarkCompact(next_gc_likely_to_collect_more); 955 intptr_t committed_memory_after = CommittedOldGenerationMemory();
956 intptr_t used_memory_after = PromotedSpaceSizeOfObjects();
957 CleanupGC::Event event;
958 event.type = CleanupGC::kMarkCompact;
959 event.time_ms = MonotonicallyIncreasingTimeInMs();
960 // Trigger one more GC if
961 // - this GC decreased committed memory,
962 // - there is high fragmentation,
963 // - there are live detached contexts.
964 event.next_gc_likely_to_collect_more =
965 (committed_memory_before - committed_memory_after) > MB ||
966 HasHighFragmentation(used_memory_after, committed_memory_after) ||
967 (detached_contexts()->length() > 0);
968 cleanup_gc_.NotifyMarkCompact(event);
951 } else { 969 } else {
952 gc_idle_time_handler_.NotifyScavenge(); 970 CleanupGC::Event event;
971 event.type = CleanupGC::kScavenge;
972 event.time_ms = MonotonicallyIncreasingTimeInMs();
973 cleanup_gc_.NotifyScavenge(event);
953 } 974 }
954 975
955 tracer()->Stop(collector); 976 tracer()->Stop(collector);
956 } 977 }
957 978
958 if (collector == MARK_COMPACTOR && 979 if (collector == MARK_COMPACTOR &&
959 (gc_callback_flags & kGCCallbackFlagForced) != 0) { 980 (gc_callback_flags & kGCCallbackFlagForced) != 0) {
960 isolate()->CountUsage(v8::Isolate::kForcedGC); 981 isolate()->CountUsage(v8::Isolate::kForcedGC);
961 } 982 }
962 983
(...skipping 14 matching lines...)
977 tracer()->ResetSurvivalEvents(); 998 tracer()->ResetSurvivalEvents();
978 old_generation_size_configured_ = false; 999 old_generation_size_configured_ = false;
979 } 1000 }
980 if (isolate()->concurrent_recompilation_enabled()) { 1001 if (isolate()->concurrent_recompilation_enabled()) {
981 // Flush the queued recompilation tasks. 1002 // Flush the queued recompilation tasks.
982 isolate()->optimizing_compile_dispatcher()->Flush(); 1003 isolate()->optimizing_compile_dispatcher()->Flush();
983 } 1004 }
984 AgeInlineCaches(); 1005 AgeInlineCaches();
985 set_retained_maps(ArrayList::cast(empty_fixed_array())); 1006 set_retained_maps(ArrayList::cast(empty_fixed_array()));
986 tracer()->AddContextDisposalTime(base::OS::TimeCurrentMillis()); 1007 tracer()->AddContextDisposalTime(base::OS::TimeCurrentMillis());
1008 CleanupGC::Event event;
1009 event.type = CleanupGC::kContextDisposed;
1010 event.time_ms = MonotonicallyIncreasingTimeInMs();
1011 cleanup_gc_.NotifyContextDisposed(event);
987 return ++contexts_disposed_; 1012 return ++contexts_disposed_;
988 } 1013 }
989 1014
990 1015
991 void Heap::MoveElements(FixedArray* array, int dst_index, int src_index, 1016 void Heap::MoveElements(FixedArray* array, int dst_index, int src_index,
992 int len) { 1017 int len) {
993 if (len == 0) return; 1018 if (len == 0) return;
994 1019
995 DCHECK(array->map() != fixed_cow_array_map()); 1020 DCHECK(array->map() != fixed_cow_array_map());
996 Object** dst_objects = array->data_start() + dst_index; 1021 Object** dst_objects = array->data_start() + dst_index;
(...skipping 3742 matching lines...)
4739 return mutator_utilization > high_mutator_utilization; 4764 return mutator_utilization > high_mutator_utilization;
4740 } 4765 }
4741 4766
4742 4767
4743 bool Heap::HasLowAllocationRate() { 4768 bool Heap::HasLowAllocationRate() {
4744 return HasLowYoungGenerationAllocationRate() && 4769 return HasLowYoungGenerationAllocationRate() &&
4745 HasLowOldGenerationAllocationRate(); 4770 HasLowOldGenerationAllocationRate();
4746 } 4771 }
4747 4772
4748 4773
4774 bool Heap::HasHighFragmentation() {
4775 intptr_t used = PromotedSpaceSizeOfObjects();
4776 intptr_t committed = CommittedOldGenerationMemory();
4777 return HasHighFragmentation(used, committed);
4778 }
4779
4780
4781 bool Heap::HasHighFragmentation(intptr_t used, intptr_t committed) {
4782 const intptr_t kSlack = 16 * MB;
4783 // Fragmentation is high if committed > 2 * used + kSlack.
4784 // Rewrite the expression to avoid overflow.
4785 return committed - used > used + kSlack;
4786 }
4787
4788
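
A quick sanity check of the overflow-avoiding rewrite above, with illustrative numbers only: for used = 100 MB and committed = 220 MB, the original form gives 220 > 2 * 100 + 16 = 216, and the rewritten form gives 220 - 100 = 120 > 100 + 16 = 116, so both agree that fragmentation is high; the rewrite never computes 2 * used, which could overflow intptr_t once used exceeds half the type's range. Below is a standalone sketch (not part of the patch) that mirrors the predicate:

// fragmentation_check.cc -- standalone illustration, not V8 code.
#include <cstdint>
#include <cstdio>

namespace {
const std::intptr_t MB = 1024 * 1024;

// Mirrors Heap::HasHighFragmentation(used, committed) from the diff above:
// "high" means committed exceeds twice the live size plus 16 MB of slack,
// written as committed - used > used + kSlack to avoid overflow.
bool HasHighFragmentation(std::intptr_t used, std::intptr_t committed) {
  const std::intptr_t kSlack = 16 * MB;
  return committed - used > used + kSlack;
}
}  // namespace

int main() {
  std::printf("%d\n", HasHighFragmentation(100 * MB, 220 * MB));  // 1: 120 MB > 116 MB
  std::printf("%d\n", HasHighFragmentation(100 * MB, 150 * MB));  // 0: 50 MB <= 116 MB
  return 0;
}

The same predicate feeds next_gc_likely_to_collect_more in the mark-compact branch earlier in this file, so a heavily fragmented old generation can trigger the delayed clean-up GC even when committed memory did not shrink.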
4749 void Heap::ReduceNewSpaceSize() { 4789 void Heap::ReduceNewSpaceSize() {
4750 if (!FLAG_predictable && HasLowAllocationRate()) { 4790 if (!FLAG_predictable && HasLowAllocationRate()) {
4751 new_space_.Shrink(); 4791 new_space_.Shrink();
4752 UncommitFromSpace(); 4792 UncommitFromSpace();
4753 } 4793 }
4754 } 4794 }
4755 4795
4756 4796
4757 bool Heap::TryFinalizeIdleIncrementalMarking( 4797 bool Heap::TryFinalizeIdleIncrementalMarking(
4758 double idle_time_in_ms, size_t size_of_objects, 4798 double idle_time_in_ms, size_t size_of_objects,
4759 size_t final_incremental_mark_compact_speed_in_bytes_per_ms) { 4799 size_t final_incremental_mark_compact_speed_in_bytes_per_ms) {
4760 if (FLAG_overapproximate_weak_closure && 4800 if (FLAG_overapproximate_weak_closure &&
4761 (incremental_marking()->IsReadyToOverApproximateWeakClosure() || 4801 (incremental_marking()->IsReadyToOverApproximateWeakClosure() ||
4762 (!incremental_marking()->weak_closure_was_overapproximated() && 4802 (!incremental_marking()->weak_closure_was_overapproximated() &&
4763 mark_compact_collector_.marking_deque()->IsEmpty() && 4803 mark_compact_collector_.marking_deque()->IsEmpty() &&
4764 gc_idle_time_handler_.ShouldDoOverApproximateWeakClosure( 4804 gc_idle_time_handler_.ShouldDoOverApproximateWeakClosure(
4765 static_cast<size_t>(idle_time_in_ms))))) { 4805 static_cast<size_t>(idle_time_in_ms))))) {
4766 OverApproximateWeakClosure( 4806 OverApproximateWeakClosure(
4767 "Idle notification: overapproximate weak closure"); 4807 "Idle notification: overapproximate weak closure");
4768 return true; 4808 return true;
4769 } else if (incremental_marking()->IsComplete() || 4809 } else if (incremental_marking()->IsComplete() ||
4770 (mark_compact_collector_.marking_deque()->IsEmpty() && 4810 (mark_compact_collector_.marking_deque()->IsEmpty() &&
4771 gc_idle_time_handler_.ShouldDoFinalIncrementalMarkCompact( 4811 gc_idle_time_handler_.ShouldDoFinalIncrementalMarkCompact(
4772 static_cast<size_t>(idle_time_in_ms), size_of_objects, 4812 static_cast<size_t>(idle_time_in_ms), size_of_objects,
4773 final_incremental_mark_compact_speed_in_bytes_per_ms))) { 4813 final_incremental_mark_compact_speed_in_bytes_per_ms))) {
4774 CollectAllGarbage(kNoGCFlags, "idle notification: finalize incremental"); 4814 CollectAllGarbage(kNoGCFlags, "idle notification: finalize incremental");
4775 gc_idle_time_handler_.NotifyIdleMarkCompact();
4776 return true; 4815 return true;
4777 } 4816 }
4778 return false; 4817 return false;
4779 } 4818 }
4780 4819
4781 4820
4782 GCIdleTimeHandler::HeapState Heap::ComputeHeapState() { 4821 GCIdleTimeHandler::HeapState Heap::ComputeHeapState() {
4783 GCIdleTimeHandler::HeapState heap_state; 4822 GCIdleTimeHandler::HeapState heap_state;
4784 heap_state.contexts_disposed = contexts_disposed_; 4823 heap_state.contexts_disposed = contexts_disposed_;
4785 heap_state.contexts_disposal_rate = 4824 heap_state.contexts_disposal_rate =
(...skipping 10 matching lines...)
4796 tracer()->IncrementalMarkingSpeedInBytesPerMillisecond()); 4835 tracer()->IncrementalMarkingSpeedInBytesPerMillisecond());
4797 heap_state.final_incremental_mark_compact_speed_in_bytes_per_ms = 4836 heap_state.final_incremental_mark_compact_speed_in_bytes_per_ms =
4798 static_cast<size_t>( 4837 static_cast<size_t>(
4799 tracer()->FinalIncrementalMarkCompactSpeedInBytesPerMillisecond()); 4838 tracer()->FinalIncrementalMarkCompactSpeedInBytesPerMillisecond());
4800 heap_state.scavenge_speed_in_bytes_per_ms = 4839 heap_state.scavenge_speed_in_bytes_per_ms =
4801 static_cast<size_t>(tracer()->ScavengeSpeedInBytesPerMillisecond()); 4840 static_cast<size_t>(tracer()->ScavengeSpeedInBytesPerMillisecond());
4802 heap_state.used_new_space_size = new_space_.Size(); 4841 heap_state.used_new_space_size = new_space_.Size();
4803 heap_state.new_space_capacity = new_space_.Capacity(); 4842 heap_state.new_space_capacity = new_space_.Capacity();
4804 heap_state.new_space_allocation_throughput_in_bytes_per_ms = 4843 heap_state.new_space_allocation_throughput_in_bytes_per_ms =
4805 tracer()->NewSpaceAllocationThroughputInBytesPerMillisecond(); 4844 tracer()->NewSpaceAllocationThroughputInBytesPerMillisecond();
4806 heap_state.has_low_allocation_rate = HasLowAllocationRate();
4807 intptr_t limit = old_generation_allocation_limit_;
4808 if (heap_state.has_low_allocation_rate) {
4809 limit = idle_old_generation_allocation_limit_;
4810 }
4811 heap_state.can_start_incremental_marking =
4812 incremental_marking()->CanBeActivated() &&
4813 HeapIsFullEnoughToStartIncrementalMarking(limit) &&
4814 !mark_compact_collector()->sweeping_in_progress();
4815 return heap_state; 4845 return heap_state;
4816 } 4846 }
4817 4847
4818 4848
4819 bool Heap::PerformIdleTimeAction(GCIdleTimeAction action, 4849 bool Heap::PerformIdleTimeAction(GCIdleTimeAction action,
4820 GCIdleTimeHandler::HeapState heap_state, 4850 GCIdleTimeHandler::HeapState heap_state,
4821 double deadline_in_ms) { 4851 double deadline_in_ms) {
4822 bool result = false; 4852 bool result = false;
4823 switch (action.type) { 4853 switch (action.type) {
4824 case DONE: 4854 case DONE:
4825 result = true; 4855 result = true;
4826 break; 4856 break;
4827 case DO_INCREMENTAL_MARKING: { 4857 case DO_INCREMENTAL_MARKING: {
4828 if (incremental_marking()->IsStopped()) { 4858 DCHECK(!incremental_marking()->IsStopped());
4829 incremental_marking()->Start(
4830 action.reduce_memory ? kReduceMemoryFootprintMask : kNoGCFlags);
4831 }
4832 double remaining_idle_time_in_ms = 0.0; 4859 double remaining_idle_time_in_ms = 0.0;
4833 do { 4860 do {
4834 incremental_marking()->Step( 4861 incremental_marking()->Step(
4835 action.parameter, IncrementalMarking::NO_GC_VIA_STACK_GUARD, 4862 action.parameter, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
4836 IncrementalMarking::FORCE_MARKING, 4863 IncrementalMarking::FORCE_MARKING,
4837 IncrementalMarking::DO_NOT_FORCE_COMPLETION); 4864 IncrementalMarking::DO_NOT_FORCE_COMPLETION);
4838 remaining_idle_time_in_ms = 4865 remaining_idle_time_in_ms =
4839 deadline_in_ms - MonotonicallyIncreasingTimeInMs(); 4866 deadline_in_ms - MonotonicallyIncreasingTimeInMs();
4840 } while (remaining_idle_time_in_ms >= 4867 } while (remaining_idle_time_in_ms >=
4841 2.0 * GCIdleTimeHandler::kIncrementalMarkingStepTimeInMs && 4868 2.0 * GCIdleTimeHandler::kIncrementalMarkingStepTimeInMs &&
4842 !incremental_marking()->IsComplete() && 4869 !incremental_marking()->IsComplete() &&
4843 !mark_compact_collector_.marking_deque()->IsEmpty()); 4870 !mark_compact_collector_.marking_deque()->IsEmpty());
4844 if (remaining_idle_time_in_ms > 0.0) { 4871 if (remaining_idle_time_in_ms > 0.0) {
4845 action.additional_work = TryFinalizeIdleIncrementalMarking( 4872 action.additional_work = TryFinalizeIdleIncrementalMarking(
4846 remaining_idle_time_in_ms, heap_state.size_of_objects, 4873 remaining_idle_time_in_ms, heap_state.size_of_objects,
4847 heap_state.final_incremental_mark_compact_speed_in_bytes_per_ms); 4874 heap_state.final_incremental_mark_compact_speed_in_bytes_per_ms);
4848 } 4875 }
4849 break; 4876 break;
4850 } 4877 }
4851 case DO_FULL_GC: { 4878 case DO_FULL_GC: {
4852 if (action.reduce_memory) {
4853 isolate_->compilation_cache()->Clear();
4854 }
4855 if (contexts_disposed_) { 4879 if (contexts_disposed_) {
4856 HistogramTimerScope scope(isolate_->counters()->gc_context()); 4880 HistogramTimerScope scope(isolate_->counters()->gc_context());
4857 CollectAllGarbage(kNoGCFlags, "idle notification: contexts disposed"); 4881 CollectAllGarbage(kNoGCFlags, "idle notification: contexts disposed");
4858 } else { 4882 } else {
4859 CollectAllGarbage(kReduceMemoryFootprintMask, 4883 CollectAllGarbage(kReduceMemoryFootprintMask,
Hannes Payer (out of office) 2015/07/01 12:03:17 Could this call end an ongoing incremental marking
ulan 2015/07/01 12:51:48 I removed this completely and added DCHECK(context
4860 "idle notification: finalize idle round"); 4884 "idle notification: non-incremental mark-compact");
4861 } 4885 }
4862 gc_idle_time_handler_.NotifyIdleMarkCompact();
4863 break; 4886 break;
4864 } 4887 }
4865 case DO_SCAVENGE: 4888 case DO_SCAVENGE:
4866 CollectGarbage(NEW_SPACE, "idle notification: scavenge"); 4889 CollectGarbage(NEW_SPACE, "idle notification: scavenge");
4867 break; 4890 break;
4868 case DO_FINALIZE_SWEEPING: 4891 case DO_FINALIZE_SWEEPING:
4869 mark_compact_collector()->EnsureSweepingCompleted(); 4892 mark_compact_collector()->EnsureSweepingCompleted();
4870 break; 4893 break;
4871 case DO_NOTHING: 4894 case DO_NOTHING:
4872 break; 4895 break;
(...skipping 714 matching lines...)
5587 // memory-constrained devices. 5610 // memory-constrained devices.
5588 if (max_old_generation_size_ <= kMaxOldSpaceSizeMediumMemoryDevice) { 5611 if (max_old_generation_size_ <= kMaxOldSpaceSizeMediumMemoryDevice) {
5589 factor = Min(factor, kMaxHeapGrowingFactorMemoryConstrained); 5612 factor = Min(factor, kMaxHeapGrowingFactorMemoryConstrained);
5590 } 5613 }
5591 5614
5592 if (FLAG_stress_compaction || 5615 if (FLAG_stress_compaction ||
5593 mark_compact_collector()->reduce_memory_footprint_) { 5616 mark_compact_collector()->reduce_memory_footprint_) {
5594 factor = kMinHeapGrowingFactor; 5617 factor = kMinHeapGrowingFactor;
5595 } 5618 }
5596 5619
5597 // TODO(hpayer): Investigate if idle_old_generation_allocation_limit_ is still
5598 // needed after taking the allocation rate for the old generation limit into
5599 // account.
5600 double idle_factor = Min(factor, kMaxHeapGrowingFactorIdle);
5601
5602 old_generation_allocation_limit_ = 5620 old_generation_allocation_limit_ =
5603 CalculateOldGenerationAllocationLimit(factor, old_gen_size); 5621 CalculateOldGenerationAllocationLimit(factor, old_gen_size);
5604 idle_old_generation_allocation_limit_ =
5605 CalculateOldGenerationAllocationLimit(idle_factor, old_gen_size);
5606 5622
5607 if (FLAG_trace_gc_verbose) { 5623 if (FLAG_trace_gc_verbose) {
5608 PrintIsolate( 5624 PrintIsolate(isolate_, "Grow: old size: %" V8_PTR_PREFIX
5609 isolate_, 5625 "d KB, new limit: %" V8_PTR_PREFIX "d KB (%.1f)\n",
5610 "Grow: old size: %" V8_PTR_PREFIX "d KB, new limit: %" V8_PTR_PREFIX 5626 old_gen_size / KB, old_generation_allocation_limit_ / KB,
5611 "d KB (%.1f), new idle limit: %" V8_PTR_PREFIX "d KB (%.1f)\n", 5627 factor);
5612 old_gen_size / KB, old_generation_allocation_limit_ / KB, factor,
5613 idle_old_generation_allocation_limit_ / KB, idle_factor);
5614 } 5628 }
5615 } 5629 }
5616 5630
5617 5631
5618 void Heap::DampenOldGenerationAllocationLimit(intptr_t old_gen_size, 5632 void Heap::DampenOldGenerationAllocationLimit(intptr_t old_gen_size,
5619 double gc_speed, 5633 double gc_speed,
5620 double mutator_speed) { 5634 double mutator_speed) {
5621 double factor = HeapGrowingFactor(gc_speed, mutator_speed); 5635 double factor = HeapGrowingFactor(gc_speed, mutator_speed);
5622 intptr_t limit = CalculateOldGenerationAllocationLimit(factor, old_gen_size); 5636 intptr_t limit = CalculateOldGenerationAllocationLimit(factor, old_gen_size);
5623 if (limit < old_generation_allocation_limit_) { 5637 if (limit < old_generation_allocation_limit_) {
(...skipping 1222 matching lines...)
6846 *object_type = "CODE_TYPE"; \ 6860 *object_type = "CODE_TYPE"; \
6847 *object_sub_type = "CODE_AGE/" #name; \ 6861 *object_sub_type = "CODE_AGE/" #name; \
6848 return true; 6862 return true;
6849 CODE_AGE_LIST_COMPLETE(COMPARE_AND_RETURN_NAME) 6863 CODE_AGE_LIST_COMPLETE(COMPARE_AND_RETURN_NAME)
6850 #undef COMPARE_AND_RETURN_NAME 6864 #undef COMPARE_AND_RETURN_NAME
6851 } 6865 }
6852 return false; 6866 return false;
6853 } 6867 }
6854 } // namespace internal 6868 } // namespace internal
6855 } // namespace v8 6869 } // namespace v8