OLD | NEW |
---|---|
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/gc-tracer.h" | 5 #include "src/heap/gc-tracer.h" |
6 | 6 |
7 #include "src/counters.h" | 7 #include "src/counters.h" |
8 #include "src/heap/heap-inl.h" | 8 #include "src/heap/heap-inl.h" |
9 #include "src/isolate.h" | 9 #include "src/isolate.h" |
10 | 10 |
(...skipping 70 matching lines...) | |
81 incremental_marking_duration(0.0) { | 81 incremental_marking_duration(0.0) { |
82 for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) { | 82 for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) { |
83 scopes[i] = 0; | 83 scopes[i] = 0; |
84 } | 84 } |
85 } | 85 } |
86 | 86 |
87 | 87 |
88 const char* GCTracer::Event::TypeName(bool short_name) const { | 88 const char* GCTracer::Event::TypeName(bool short_name) const { |
89 switch (type) { | 89 switch (type) { |
90 case SCAVENGER: | 90 case SCAVENGER: |
91 if (short_name) { | 91 return (short_name) ? "s" : "Scavenge"; |
92 return "s"; | |
93 } else { | |
94 return "Scavenge"; | |
95 } | |
96 case MARK_COMPACTOR: | 92 case MARK_COMPACTOR: |
97 case INCREMENTAL_MARK_COMPACTOR: | 93 case INCREMENTAL_MARK_COMPACTOR: |
98 if (short_name) { | 94 return (short_name) ? "ms" : "Mark-sweep"; |
99 return "ms"; | 95 case MINOR_MARK_COMPACTOR: |
100 } else { | 96 return (short_name) ? "mmc" : "Minor Mark-Compact"; |
101 return "Mark-sweep"; | |
102 } | |
103 case START: | 97 case START: |
104 if (short_name) { | 98 return (short_name) ? "st" : "Start"; |
105 return "st"; | |
106 } else { | |
107 return "Start"; | |
108 } | |
109 } | 99 } |
110 return "Unknown Event Type"; | 100 return "Unknown Event Type"; |
111 } | 101 } |
112 | 102 |
113 GCTracer::GCTracer(Heap* heap) | 103 GCTracer::GCTracer(Heap* heap) |
114 : heap_(heap), | 104 : heap_(heap), |
115 current_(Event::START, GarbageCollectionReason::kUnknown, nullptr), | 105 current_(Event::START, GarbageCollectionReason::kUnknown, nullptr), |
116 previous_(current_), | 106 previous_(current_), |
117 incremental_marking_bytes_(0), | 107 incremental_marking_bytes_(0), |
118 incremental_marking_duration_(0.0), | 108 incremental_marking_duration_(0.0), |
109 incremental_marking_start_time_(0.0), | |
119 recorded_incremental_marking_speed_(0.0), | 110 recorded_incremental_marking_speed_(0.0), |
120 allocation_time_ms_(0.0), | 111 allocation_time_ms_(0.0), |
121 new_space_allocation_counter_bytes_(0), | 112 new_space_allocation_counter_bytes_(0), |
122 old_generation_allocation_counter_bytes_(0), | 113 old_generation_allocation_counter_bytes_(0), |
123 allocation_duration_since_gc_(0.0), | 114 allocation_duration_since_gc_(0.0), |
124 new_space_allocation_in_bytes_since_gc_(0), | 115 new_space_allocation_in_bytes_since_gc_(0), |
125 old_generation_allocation_in_bytes_since_gc_(0), | 116 old_generation_allocation_in_bytes_since_gc_(0), |
126 combined_mark_compact_speed_cache_(0.0), | 117 combined_mark_compact_speed_cache_(0.0), |
127 start_counter_(0) { | 118 start_counter_(0) { |
128 current_.end_time = heap_->MonotonicallyIncreasingTimeInMs(); | 119 current_.end_time = heap_->MonotonicallyIncreasingTimeInMs(); |
(...skipping 27 matching lines...) | |
156 GarbageCollectionReason gc_reason, | 147 GarbageCollectionReason gc_reason, |
157 const char* collector_reason) { | 148 const char* collector_reason) { |
158 start_counter_++; | 149 start_counter_++; |
159 if (start_counter_ != 1) return; | 150 if (start_counter_ != 1) return; |
160 | 151 |
161 previous_ = current_; | 152 previous_ = current_; |
162 double start_time = heap_->MonotonicallyIncreasingTimeInMs(); | 153 double start_time = heap_->MonotonicallyIncreasingTimeInMs(); |
163 SampleAllocation(start_time, heap_->NewSpaceAllocationCounter(), | 154 SampleAllocation(start_time, heap_->NewSpaceAllocationCounter(), |
164 heap_->OldGenerationAllocationCounter()); | 155 heap_->OldGenerationAllocationCounter()); |
165 | 156 |
166 if (collector == SCAVENGER) { | 157 if (collector == SCAVENGER) { |
ulan 2016/11/11 10:22:10: Nit: use switch statement.
Michael Lippautz 2016/11/11 10:38:22: Done.
| |
167 current_ = Event(Event::SCAVENGER, gc_reason, collector_reason); | 158 current_ = Event(Event::SCAVENGER, gc_reason, collector_reason); |
159 } else if (collector == MINOR_MARK_COMPACTOR) { | |
160 current_ = Event(Event::MINOR_MARK_COMPACTOR, gc_reason, collector_reason); | |
168 } else if (collector == MARK_COMPACTOR) { | 161 } else if (collector == MARK_COMPACTOR) { |
169 if (heap_->incremental_marking()->WasActivated()) { | 162 if (heap_->incremental_marking()->WasActivated()) { |
170 current_ = | 163 current_ = |
171 Event(Event::INCREMENTAL_MARK_COMPACTOR, gc_reason, collector_reason); | 164 Event(Event::INCREMENTAL_MARK_COMPACTOR, gc_reason, collector_reason); |
172 } else { | 165 } else { |
173 current_ = Event(Event::MARK_COMPACTOR, gc_reason, collector_reason); | 166 current_ = Event(Event::MARK_COMPACTOR, gc_reason, collector_reason); |
174 } | 167 } |
175 } | 168 } |
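Following up on ulan's nit above about using a switch statement: a minimal sketch, using only identifiers visible in this diff, of how the collector dispatch in Start() could read after that refactor (the actual follow-up patchset may differ):

  switch (collector) {
    case SCAVENGER:
      current_ = Event(Event::SCAVENGER, gc_reason, collector_reason);
      break;
    case MINOR_MARK_COMPACTOR:
      current_ =
          Event(Event::MINOR_MARK_COMPACTOR, gc_reason, collector_reason);
      break;
    case MARK_COMPACTOR:
      // Mirrors the original if/else: an activated incremental marker turns
      // the full GC into an INCREMENTAL_MARK_COMPACTOR event.
      if (heap_->incremental_marking()->WasActivated()) {
        current_ = Event(Event::INCREMENTAL_MARK_COMPACTOR, gc_reason,
                         collector_reason);
      } else {
        current_ = Event(Event::MARK_COMPACTOR, gc_reason, collector_reason);
      }
      break;
  }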
176 | 169 |
177 current_.reduce_memory = heap_->ShouldReduceMemory(); | 170 current_.reduce_memory = heap_->ShouldReduceMemory(); |
178 current_.start_time = start_time; | 171 current_.start_time = start_time; |
179 current_.start_object_size = heap_->SizeOfObjects(); | 172 current_.start_object_size = heap_->SizeOfObjects(); |
180 current_.start_memory_size = heap_->memory_allocator()->Size(); | 173 current_.start_memory_size = heap_->memory_allocator()->Size(); |
181 current_.start_holes_size = CountTotalHolesSize(heap_); | 174 current_.start_holes_size = CountTotalHolesSize(heap_); |
182 current_.new_space_object_size = | 175 current_.new_space_object_size = |
183 heap_->new_space()->top() - heap_->new_space()->bottom(); | 176 heap_->new_space()->top() - heap_->new_space()->bottom(); |
184 | 177 |
185 current_.incremental_marking_bytes = 0; | 178 current_.incremental_marking_bytes = 0; |
186 current_.incremental_marking_duration = 0; | 179 current_.incremental_marking_duration = 0; |
187 | 180 |
188 for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) { | 181 for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) { |
189 current_.scopes[i] = 0; | 182 current_.scopes[i] = 0; |
190 } | 183 } |
191 | 184 |
192 size_t committed_memory = heap_->CommittedMemory() / KB; | 185 size_t committed_memory = heap_->CommittedMemory() / KB; |
193 size_t used_memory = current_.start_object_size / KB; | 186 size_t used_memory = current_.start_object_size / KB; |
194 | 187 |
195 Counters* counters = heap_->isolate()->counters(); | 188 Counters* counters = heap_->isolate()->counters(); |
196 | 189 |
197 if (collector == SCAVENGER) { | 190 if (Heap::IsYoungGenerationCollector(collector)) { |
198 counters->scavenge_reason()->AddSample(static_cast<int>(gc_reason)); | 191 counters->scavenge_reason()->AddSample(static_cast<int>(gc_reason)); |
199 } else { | 192 } else { |
200 counters->mark_compact_reason()->AddSample(static_cast<int>(gc_reason)); | 193 counters->mark_compact_reason()->AddSample(static_cast<int>(gc_reason)); |
201 } | 194 } |
202 counters->aggregated_memory_heap_committed()->AddSample(start_time, | 195 counters->aggregated_memory_heap_committed()->AddSample(start_time, |
203 committed_memory); | 196 committed_memory); |
204 counters->aggregated_memory_heap_used()->AddSample(start_time, used_memory); | 197 counters->aggregated_memory_heap_used()->AddSample(start_time, used_memory); |
205 // TODO(cbruni): remove once we fully moved to a trace-based system. | 198 // TODO(cbruni): remove once we fully moved to a trace-based system. |
206 if (V8_UNLIKELY(FLAG_runtime_stats)) { | 199 if (V8_UNLIKELY(FLAG_runtime_stats)) { |
207 RuntimeCallStats::Enter(heap_->isolate()->counters()->runtime_call_stats(), | 200 RuntimeCallStats::Enter(heap_->isolate()->counters()->runtime_call_stats(), |
208 &timer_, &RuntimeCallStats::GC); | 201 &timer_, &RuntimeCallStats::GC); |
209 } | 202 } |
210 } | 203 } |
211 | 204 |
212 void GCTracer::ResetIncrementalMarkingCounters() { | 205 void GCTracer::ResetIncrementalMarkingCounters() { |
213 incremental_marking_bytes_ = 0; | 206 incremental_marking_bytes_ = 0; |
214 incremental_marking_duration_ = 0; | 207 incremental_marking_duration_ = 0; |
215 for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) { | 208 for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) { |
216 incremental_marking_scopes_[i].ResetCurrentCycle(); | 209 incremental_marking_scopes_[i].ResetCurrentCycle(); |
217 } | 210 } |
218 } | 211 } |
219 | 212 |
220 void GCTracer::Stop(GarbageCollector collector) { | 213 void GCTracer::Stop(GarbageCollector collector) { |
221 start_counter_--; | 214 start_counter_--; |
222 if (start_counter_ != 0) { | 215 if (start_counter_ != 0) { |
223 heap_->isolate()->PrintWithTimestamp( | 216 heap_->isolate()->PrintWithTimestamp( |
224 "[Finished reentrant %s during %s.]\n", | 217 "[Finished reentrant %s during %s.]\n", |
225 collector == SCAVENGER ? "Scavenge" : "Mark-sweep", | 218 collector == SCAVENGER |
ulan 2016/11/11 10:22:10: Nit: maybe extract to a separate function.
Michael Lippautz 2016/11/11 10:38:22: Done.
| |
219 ? "Scavenge" | |
220 : (collector == MINOR_MARK_COMPACTOR) ? "Minor Mark-Compact" | |
221 : "Mark-sweep", | |
226 current_.TypeName(false)); | 222 current_.TypeName(false)); |
227 return; | 223 return; |
228 } | 224 } |
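Following ulan's nit above about extracting the collector-name selection into a separate function, a minimal sketch of what such a helper could look like; the name CollectorName is hypothetical and not necessarily what the follow-up patchset uses:

// Hypothetical helper (name assumed, not taken from the patch): maps the
// GarbageCollector passed to Stop() to the human-readable name used in the
// reentrant-GC message above.
static const char* CollectorName(GarbageCollector collector) {
  switch (collector) {
    case SCAVENGER:
      return "Scavenge";
    case MINOR_MARK_COMPACTOR:
      return "Minor Mark-Compact";
    case MARK_COMPACTOR:
      return "Mark-sweep";
  }
  return "Unknown collector";
}

With such a helper, the PrintWithTimestamp call above would pass CollectorName(collector) instead of the nested ternary.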
229 | 225 |
230 DCHECK(start_counter_ >= 0); | 226 DCHECK(start_counter_ >= 0); |
231 DCHECK((collector == SCAVENGER && current_.type == Event::SCAVENGER) || | 227 DCHECK((collector == SCAVENGER && current_.type == Event::SCAVENGER) || |
228 (collector == MINOR_MARK_COMPACTOR && | |
229 current_.type == Event::MINOR_MARK_COMPACTOR) || | |
232 (collector == MARK_COMPACTOR && | 230 (collector == MARK_COMPACTOR && |
233 (current_.type == Event::MARK_COMPACTOR || | 231 (current_.type == Event::MARK_COMPACTOR || |
234 current_.type == Event::INCREMENTAL_MARK_COMPACTOR))); | 232 current_.type == Event::INCREMENTAL_MARK_COMPACTOR))); |
235 | 233 |
236 current_.end_time = heap_->MonotonicallyIncreasingTimeInMs(); | 234 current_.end_time = heap_->MonotonicallyIncreasingTimeInMs(); |
237 current_.end_object_size = heap_->SizeOfObjects(); | 235 current_.end_object_size = heap_->SizeOfObjects(); |
238 current_.end_memory_size = heap_->memory_allocator()->Size(); | 236 current_.end_memory_size = heap_->memory_allocator()->Size(); |
239 current_.end_holes_size = CountTotalHolesSize(heap_); | 237 current_.end_holes_size = CountTotalHolesSize(heap_); |
240 current_.survived_new_space_object_size = heap_->SurvivedNewSpaceObjectSize(); | 238 current_.survived_new_space_object_size = heap_->SurvivedNewSpaceObjectSize(); |
241 | 239 |
(...skipping 251 matching lines...) | |
493 current_.end_holes_size, allocated_since_last_gc, | 491 current_.end_holes_size, allocated_since_last_gc, |
494 heap_->promoted_objects_size(), | 492 heap_->promoted_objects_size(), |
495 heap_->semi_space_copied_object_size(), | 493 heap_->semi_space_copied_object_size(), |
496 heap_->nodes_died_in_new_space_, heap_->nodes_copied_in_new_space_, | 494 heap_->nodes_died_in_new_space_, heap_->nodes_copied_in_new_space_, |
497 heap_->nodes_promoted_, heap_->promotion_ratio_, | 495 heap_->nodes_promoted_, heap_->promotion_ratio_, |
498 AverageSurvivalRatio(), heap_->promotion_rate_, | 496 AverageSurvivalRatio(), heap_->promotion_rate_, |
499 heap_->semi_space_copied_rate_, | 497 heap_->semi_space_copied_rate_, |
500 NewSpaceAllocationThroughputInBytesPerMillisecond(), | 498 NewSpaceAllocationThroughputInBytesPerMillisecond(), |
501 ContextDisposalRateInMilliseconds()); | 499 ContextDisposalRateInMilliseconds()); |
502 break; | 500 break; |
501 case Event::MINOR_MARK_COMPACTOR: | |
502 heap_->isolate()->PrintWithTimestamp( | |
503 "pause=%.1f " | |
504 "mutator=%.1f " | |
505 "gc=%s " | |
506 "reduce_memory=%d\n", | |
507 duration, spent_in_mutator, current_.TypeName(true), | |
508 current_.reduce_memory); | |
509 break; | |
503 case Event::MARK_COMPACTOR: | 510 case Event::MARK_COMPACTOR: |
504 case Event::INCREMENTAL_MARK_COMPACTOR: | 511 case Event::INCREMENTAL_MARK_COMPACTOR: |
505 heap_->isolate()->PrintWithTimestamp( | 512 heap_->isolate()->PrintWithTimestamp( |
506 "pause=%.1f " | 513 "pause=%.1f " |
507 "mutator=%.1f " | 514 "mutator=%.1f " |
508 "gc=%s " | 515 "gc=%s " |
509 "reduce_memory=%d " | 516 "reduce_memory=%d " |
510 "clear=%1.f " | 517 "clear=%1.f " |
511 "clear.code_flush=%.1f " | 518 "clear.code_flush=%.1f " |
512 "clear.dependent_code=%.1f " | 519 "clear.dependent_code=%.1f " |
(...skipping 299 matching lines...) | |
812 } | 819 } |
813 | 820 |
814 void GCTracer::ResetSurvivalEvents() { recorded_survival_ratios_.Reset(); } | 821 void GCTracer::ResetSurvivalEvents() { recorded_survival_ratios_.Reset(); } |
815 | 822 |
816 void GCTracer::NotifyIncrementalMarkingStart() { | 823 void GCTracer::NotifyIncrementalMarkingStart() { |
817 incremental_marking_start_time_ = heap_->MonotonicallyIncreasingTimeInMs(); | 824 incremental_marking_start_time_ = heap_->MonotonicallyIncreasingTimeInMs(); |
818 } | 825 } |
819 | 826 |
820 } // namespace internal | 827 } // namespace internal |
821 } // namespace v8 | 828 } // namespace v8 |