OLD | NEW |
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/v8.h" | 5 #include "src/v8.h" |
6 | 6 |
7 #include "src/heap/gc-tracer.h" | 7 #include "src/heap/gc-tracer.h" |
8 | 8 |
9 namespace v8 { | 9 namespace v8 { |
10 namespace internal { | 10 namespace internal { |
(...skipping 81 matching lines...)
92 | 92 |
93 GCTracer::GCTracer(Heap* heap) | 93 GCTracer::GCTracer(Heap* heap) |
94 : heap_(heap), | 94 : heap_(heap), |
95 cumulative_incremental_marking_steps_(0), | 95 cumulative_incremental_marking_steps_(0), |
96 cumulative_incremental_marking_bytes_(0), | 96 cumulative_incremental_marking_bytes_(0), |
97 cumulative_incremental_marking_duration_(0.0), | 97 cumulative_incremental_marking_duration_(0.0), |
98 cumulative_pure_incremental_marking_duration_(0.0), | 98 cumulative_pure_incremental_marking_duration_(0.0), |
99 longest_incremental_marking_step_(0.0), | 99 longest_incremental_marking_step_(0.0), |
100 cumulative_marking_duration_(0.0), | 100 cumulative_marking_duration_(0.0), |
101 cumulative_sweeping_duration_(0.0), | 101 cumulative_sweeping_duration_(0.0), |
102 new_space_allocation_time_ms_(0.0), | 102 allocation_time_ms_(0.0), |
103 new_space_allocation_counter_bytes_(0), | 103 new_space_allocation_counter_bytes_(0), |
104 new_space_allocation_duration_since_gc_(0.0), | 104 old_generation_allocation_counter_bytes_(0), |
| 105 allocation_duration_since_gc_(0.0), |
105 new_space_allocation_in_bytes_since_gc_(0), | 106 new_space_allocation_in_bytes_since_gc_(0), |
| 107 old_generation_allocation_in_bytes_since_gc_(0), |
106 start_counter_(0) { | 108 start_counter_(0) { |
107 current_ = Event(Event::START, NULL, NULL); | 109 current_ = Event(Event::START, NULL, NULL); |
108 current_.end_time = base::OS::TimeCurrentMillis(); | 110 current_.end_time = base::OS::TimeCurrentMillis(); |
109 previous_ = previous_incremental_mark_compactor_event_ = current_; | 111 previous_ = previous_incremental_mark_compactor_event_ = current_; |
110 } | 112 } |
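
The renamed and added fields in this initializer list widen the tracer's allocation bookkeeping from new space only to both generations, while the sampling clock (allocation_time_ms_) and the duration accumulator become generation-agnostic. A minimal sketch of the resulting state, assuming the declarations simply mirror the initializer list above (the real ones live in src/heap/gc-tracer.h):

#include <cstddef>

// Sketch of the widened allocation bookkeeping introduced by this change.
struct AllocationBookkeeping {
  double allocation_time_ms_ = 0.0;  // wall-clock time of the last sample
  size_t new_space_allocation_counter_bytes_ = 0;       // last new-space counter
  size_t old_generation_allocation_counter_bytes_ = 0;  // last old-gen counter
  double allocation_duration_since_gc_ = 0.0;  // sampled ms since the last GC
  size_t new_space_allocation_in_bytes_since_gc_ = 0;
  size_t old_generation_allocation_in_bytes_since_gc_ = 0;
};
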
111 | 113 |
112 | 114 |
113 void GCTracer::Start(GarbageCollector collector, const char* gc_reason, | 115 void GCTracer::Start(GarbageCollector collector, const char* gc_reason, |
114 const char* collector_reason) { | 116 const char* collector_reason) { |
115 start_counter_++; | 117 start_counter_++; |
116 if (start_counter_ != 1) return; | 118 if (start_counter_ != 1) return; |
117 | 119 |
118 previous_ = current_; | 120 previous_ = current_; |
119 double start_time = heap_->MonotonicallyIncreasingTimeInMs(); | 121 double start_time = heap_->MonotonicallyIncreasingTimeInMs(); |
120 SampleNewSpaceAllocation(start_time, heap_->NewSpaceAllocationCounter()); | 122 SampleAllocation(start_time, heap_->NewSpaceAllocationCounter(), |
| 123 heap_->OldGenerationAllocationCounter()); |
121 if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) | 124 if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) |
122 previous_incremental_mark_compactor_event_ = current_; | 125 previous_incremental_mark_compactor_event_ = current_; |
123 | 126 |
124 if (collector == SCAVENGER) { | 127 if (collector == SCAVENGER) { |
125 current_ = Event(Event::SCAVENGER, gc_reason, collector_reason); | 128 current_ = Event(Event::SCAVENGER, gc_reason, collector_reason); |
126 } else if (collector == MARK_COMPACTOR) { | 129 } else if (collector == MARK_COMPACTOR) { |
127 if (heap_->incremental_marking()->WasActivated()) { | 130 if (heap_->incremental_marking()->WasActivated()) { |
128 current_ = | 131 current_ = |
129 Event(Event::INCREMENTAL_MARK_COMPACTOR, gc_reason, collector_reason); | 132 Event(Event::INCREMENTAL_MARK_COMPACTOR, gc_reason, collector_reason); |
130 } else { | 133 } else { |
(...skipping 45 matching lines...)
176 DCHECK((collector == SCAVENGER && current_.type == Event::SCAVENGER) || | 179 DCHECK((collector == SCAVENGER && current_.type == Event::SCAVENGER) || |
177 (collector == MARK_COMPACTOR && | 180 (collector == MARK_COMPACTOR && |
178 (current_.type == Event::MARK_COMPACTOR || | 181 (current_.type == Event::MARK_COMPACTOR || |
179 current_.type == Event::INCREMENTAL_MARK_COMPACTOR))); | 182 current_.type == Event::INCREMENTAL_MARK_COMPACTOR))); |
180 | 183 |
181 current_.end_time = heap_->MonotonicallyIncreasingTimeInMs(); | 184 current_.end_time = heap_->MonotonicallyIncreasingTimeInMs(); |
182 current_.end_object_size = heap_->SizeOfObjects(); | 185 current_.end_object_size = heap_->SizeOfObjects(); |
183 current_.end_memory_size = heap_->isolate()->memory_allocator()->Size(); | 186 current_.end_memory_size = heap_->isolate()->memory_allocator()->Size(); |
184 current_.end_holes_size = CountTotalHolesSize(heap_); | 187 current_.end_holes_size = CountTotalHolesSize(heap_); |
185 | 188 |
186 AddNewSpaceAllocation(current_.end_time); | 189 AddAllocation(current_.end_time); |
187 | 190 |
188 int committed_memory = static_cast<int>(heap_->CommittedMemory() / KB); | 191 int committed_memory = static_cast<int>(heap_->CommittedMemory() / KB); |
189 int used_memory = static_cast<int>(current_.end_object_size / KB); | 192 int used_memory = static_cast<int>(current_.end_object_size / KB); |
190 heap_->isolate()->counters()->aggregated_memory_heap_committed()->AddSample( | 193 heap_->isolate()->counters()->aggregated_memory_heap_committed()->AddSample( |
191 current_.end_time, committed_memory); | 194 current_.end_time, committed_memory); |
192 heap_->isolate()->counters()->aggregated_memory_heap_used()->AddSample( | 195 heap_->isolate()->counters()->aggregated_memory_heap_used()->AddSample( |
193 current_.end_time, used_memory); | 196 current_.end_time, used_memory); |
194 | 197 |
195 if (current_.type == Event::SCAVENGER) { | 198 if (current_.type == Event::SCAVENGER) { |
196 current_.incremental_marking_steps = | 199 current_.incremental_marking_steps = |
(...skipping 53 matching lines...)
250 if (FLAG_trace_gc_nvp) | 253 if (FLAG_trace_gc_nvp) |
251 PrintNVP(); | 254 PrintNVP(); |
252 else | 255 else |
253 Print(); | 256 Print(); |
254 | 257 |
255 heap_->PrintShortHeapStatistics(); | 258 heap_->PrintShortHeapStatistics(); |
256 } | 259 } |
257 } | 260 } |
258 | 261 |
259 | 262 |
260 void GCTracer::SampleNewSpaceAllocation(double current_ms, | 263 void GCTracer::SampleAllocation(double current_ms, |
261 size_t counter_bytes) { | 264 size_t new_space_counter_bytes, |
262 if (new_space_allocation_time_ms_ == 0) { | 265 size_t old_generation_counter_bytes) { |
| 266 if (allocation_time_ms_ == 0) { |
263 // It is the first sample. | 267 // It is the first sample. |
264 new_space_allocation_time_ms_ = current_ms; | 268 allocation_time_ms_ = current_ms; |
265 new_space_allocation_counter_bytes_ = counter_bytes; | 269 new_space_allocation_counter_bytes_ = new_space_counter_bytes; |
| 270 old_generation_allocation_counter_bytes_ = old_generation_counter_bytes; |
266 return; | 271 return; |
267 } | 272 } |
268 // This assumes that counters are unsigned integers so that the subtraction | 273 // This assumes that counters are unsigned integers so that the subtraction |
269 // below works even if the new counter is less than the old counter. | 274 // below works even if the new counter is less than the old counter. |
270 size_t allocated_bytes = counter_bytes - new_space_allocation_counter_bytes_; | 275 size_t new_space_allocated_bytes = |
271 double duration = current_ms - new_space_allocation_time_ms_; | 276 new_space_counter_bytes - new_space_allocation_counter_bytes_; |
| 277 size_t old_generation_allocated_bytes = |
| 278 old_generation_counter_bytes - old_generation_allocation_counter_bytes_; |
| 279 double duration = current_ms - allocation_time_ms_; |
272 const double kMinDurationMs = 1; | 280 const double kMinDurationMs = 1; |
273 if (duration < kMinDurationMs) { | 281 if (duration < kMinDurationMs) { |
274 // Do not sample small durations to avoid precision errors. | 282 // Do not sample small durations to avoid precision errors. |
275 return; | 283 return; |
276 } | 284 } |
277 new_space_allocation_time_ms_ = current_ms; | 285 allocation_time_ms_ = current_ms; |
278 new_space_allocation_counter_bytes_ = counter_bytes; | 286 new_space_allocation_counter_bytes_ = new_space_counter_bytes; |
279 new_space_allocation_duration_since_gc_ += duration; | 287 old_generation_allocation_counter_bytes_ = old_generation_counter_bytes; |
280 new_space_allocation_in_bytes_since_gc_ += allocated_bytes; | 288 allocation_duration_since_gc_ += duration; |
| 289 new_space_allocation_in_bytes_since_gc_ += new_space_allocated_bytes; |
| 290 old_generation_allocation_in_bytes_since_gc_ += |
| 291 old_generation_allocated_bytes; |
281 } | 292 } |
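
The comment above leans on unsigned arithmetic being modular in C++: even if a counter has wrapped and the new reading is numerically smaller than the cached one, the subtraction still yields the bytes allocated in between. A standalone illustration of that property (the values here are made up):

#include <cstddef>
#include <cstdint>
#include <cstdio>

int main() {
  // Old reading taken just before the counter wraps; new reading after.
  size_t old_counter = SIZE_MAX - 10;
  size_t new_counter = 5;
  // Unsigned subtraction is modulo 2^N, so this yields 16, not underflow.
  size_t allocated_bytes = new_counter - old_counter;
  std::printf("allocated: %zu bytes\n", allocated_bytes);  // prints 16
  return 0;
}
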
282 | 293 |
283 | 294 |
284 void GCTracer::AddNewSpaceAllocation(double current_ms) { | 295 void GCTracer::AddAllocation(double current_ms) { |
285 new_space_allocation_time_ms_ = current_ms; | 296 allocation_time_ms_ = current_ms; |
| 297 new_space_allocation_events_.push_front(AllocationEvent( |
| 298 allocation_duration_since_gc_, new_space_allocation_in_bytes_since_gc_)); |
286 allocation_events_.push_front( | 299 allocation_events_.push_front( |
287 AllocationEvent(new_space_allocation_duration_since_gc_, | 300 AllocationEvent(allocation_duration_since_gc_, |
288 new_space_allocation_in_bytes_since_gc_)); | 301 new_space_allocation_in_bytes_since_gc_ + |
289 new_space_allocation_duration_since_gc_ = 0; | 302 old_generation_allocation_in_bytes_since_gc_)); |
| 303 allocation_duration_since_gc_ = 0; |
290 new_space_allocation_in_bytes_since_gc_ = 0; | 304 new_space_allocation_in_bytes_since_gc_ = 0; |
| 305 old_generation_allocation_in_bytes_since_gc_ = 0; |
291 } | 306 } |
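
After this change the tracer keeps two ring buffers: new_space_allocation_events_ records new-space bytes only, while allocation_events_ now records the combined new-space plus old-generation total; both share one sampled duration. A hedged sketch of the push-then-reset pattern, with std::deque standing in for the tracer's ring buffer type:

#include <cstddef>
#include <deque>

// Simplified stand-in for GCTracer::AllocationEvent.
struct AllocationEvent {
  double duration_;             // sampled wall-clock span in ms
  size_t allocation_in_bytes_;  // bytes allocated during that span
};

// Simplified AddAllocation: record the accumulated sample in both buffers,
// then reset the "since last GC" accumulators.
void AddAllocationSketch(std::deque<AllocationEvent>& new_space_events,
                         std::deque<AllocationEvent>& combined_events,
                         double& duration_ms, size_t& new_space_bytes,
                         size_t& old_generation_bytes) {
  new_space_events.push_front({duration_ms, new_space_bytes});
  combined_events.push_front(
      {duration_ms, new_space_bytes + old_generation_bytes});
  duration_ms = 0.0;
  new_space_bytes = 0;
  old_generation_bytes = 0;
}
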
292 | 307 |
293 | 308 |
294 void GCTracer::AddContextDisposalTime(double time) { | 309 void GCTracer::AddContextDisposalTime(double time) { |
295 context_disposal_events_.push_front(ContextDisposalEvent(time)); | 310 context_disposal_events_.push_front(ContextDisposalEvent(time)); |
296 } | 311 } |
297 | 312 |
298 | 313 |
299 void GCTracer::AddSurvivalRatio(double promotion_ratio) { | 314 void GCTracer::AddSurvivalRatio(double promotion_ratio) { |
300 survival_events_.push_front(SurvivalEvent(promotion_ratio)); | 315 survival_events_.push_front(SurvivalEvent(promotion_ratio)); |
(...skipping 275 matching lines...)
576 } | 591 } |
577 | 592 |
578 if (durations == 0.0) return 0; | 593 if (durations == 0.0) return 0; |
579 | 594 |
580 return static_cast<intptr_t>(bytes / durations); | 595 return static_cast<intptr_t>(bytes / durations); |
581 } | 596 } |
582 | 597 |
583 | 598 |
584 size_t GCTracer::NewSpaceAllocationThroughputInBytesPerMillisecond() const { | 599 size_t GCTracer::NewSpaceAllocationThroughputInBytesPerMillisecond() const { |
585 size_t bytes = new_space_allocation_in_bytes_since_gc_; | 600 size_t bytes = new_space_allocation_in_bytes_since_gc_; |
586 double durations = new_space_allocation_duration_since_gc_; | 601 double durations = allocation_duration_since_gc_; |
587 AllocationEventBuffer::const_iterator iter = allocation_events_.begin(); | 602 AllocationEventBuffer::const_iterator iter = |
| 603 new_space_allocation_events_.begin(); |
588 const size_t max_bytes = static_cast<size_t>(-1); | 604 const size_t max_bytes = static_cast<size_t>(-1); |
589 while (iter != allocation_events_.end() && bytes < max_bytes - bytes) { | 605 while (iter != new_space_allocation_events_.end() && |
| 606 bytes < max_bytes - bytes) { |
590 bytes += iter->allocation_in_bytes_; | 607 bytes += iter->allocation_in_bytes_; |
591 durations += iter->duration_; | 608 durations += iter->duration_; |
592 ++iter; | 609 ++iter; |
593 } | 610 } |
594 | 611 |
595 if (durations == 0.0) return 0; | 612 if (durations == 0.0) return 0; |
596 | 613 |
597 return static_cast<size_t>(bytes / durations + 0.5); | 614 return static_cast<size_t>(bytes / durations + 0.5); |
598 } | 615 } |
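
The loop guard `bytes < max_bytes - bytes` (equivalently, 2 * bytes < max_bytes) stops the accumulation once the running total reaches half of the size_t range; it is a conservative cap that keeps the following additions from wrapping around. The condition in isolation:

#include <cstddef>

// True while it is conservatively safe to keep accumulating: the running
// total is still below half of SIZE_MAX, so adding another similarly-sized
// term cannot overflow.
bool CanAccumulate(size_t bytes) {
  const size_t max_bytes = static_cast<size_t>(-1);  // SIZE_MAX
  return bytes < max_bytes - bytes;
}
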
599 | 616 |
600 | 617 |
601 size_t GCTracer::NewSpaceAllocatedBytesInLast(double time_ms) const { | 618 size_t GCTracer::AllocatedBytesInLast(double time_ms) const { |
602 size_t bytes = new_space_allocation_in_bytes_since_gc_; | 619 size_t bytes = new_space_allocation_in_bytes_since_gc_ + |
603 double durations = new_space_allocation_duration_since_gc_; | 620 old_generation_allocation_in_bytes_since_gc_; |
| 621 double durations = allocation_duration_since_gc_; |
604 AllocationEventBuffer::const_iterator iter = allocation_events_.begin(); | 622 AllocationEventBuffer::const_iterator iter = allocation_events_.begin(); |
605 const size_t max_bytes = static_cast<size_t>(-1); | 623 const size_t max_bytes = static_cast<size_t>(-1); |
606 while (iter != allocation_events_.end() && bytes < max_bytes - bytes && | 624 while (iter != allocation_events_.end() && bytes < max_bytes - bytes && |
607 durations < time_ms) { | 625 durations < time_ms) { |
608 bytes += iter->allocation_in_bytes_; | 626 bytes += iter->allocation_in_bytes_; |
609 durations += iter->duration_; | 627 durations += iter->duration_; |
610 ++iter; | 628 ++iter; |
611 } | 629 } |
612 | 630 |
613 if (durations == 0.0) return 0; | 631 if (durations == 0.0) return 0; |
614 | 632 |
615 bytes = static_cast<size_t>(bytes * (time_ms / durations) + 0.5); | 633 bytes = static_cast<size_t>(bytes * (time_ms / durations) + 0.5); |
616 // Return at least 1 since 0 means "no data". | 634 // Return at least 1 since 0 means "no data". |
617 return std::max<size_t>(bytes, 1); | 635 return std::max<size_t>(bytes, 1); |
618 } | 636 } |
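
The final scaling step extrapolates the observed samples linearly to the requested window: if `durations` ms of samples account for `bytes`, a `time_ms` window is credited with bytes * (time_ms / durations), clamped to at least 1 because 0 is reserved for "no data". A worked example with illustrative numbers:

#include <algorithm>
#include <cstddef>

size_t ExtrapolateBytes(size_t bytes, double durations, double time_ms) {
  if (durations == 0.0) return 0;
  size_t scaled = static_cast<size_t>(bytes * (time_ms / durations) + 0.5);
  return std::max<size_t>(scaled, 1);  // 0 is reserved for "no data"
}

// 2 MB observed over 4000 ms, extrapolated to a 5000 ms window:
// ExtrapolateBytes(2 * 1024 * 1024, 4000.0, 5000.0) == 2621440 (2.5 MB).
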
619 | 637 |
620 | 638 |
621 size_t GCTracer::CurrentNewSpaceAllocationThroughputInBytesPerMillisecond() | 639 size_t GCTracer::CurrentAllocationThroughputInBytesPerMillisecond() const { |
622 const { | |
623 static const double kThroughputTimeFrame = 5000; | 640 static const double kThroughputTimeFrame = 5000; |
624 size_t allocated_bytes = NewSpaceAllocatedBytesInLast(kThroughputTimeFrame); | 641 size_t allocated_bytes = AllocatedBytesInLast(kThroughputTimeFrame); |
625 if (allocated_bytes == 0) return 0; | 642 if (allocated_bytes == 0) return 0; |
626 return static_cast<size_t>((allocated_bytes / kThroughputTimeFrame) + 1); | 643 return static_cast<size_t>((allocated_bytes / kThroughputTimeFrame) + 1); |
627 } | 644 } |
628 | 645 |
629 | 646 |
630 double GCTracer::ContextDisposalRateInMilliseconds() const { | 647 double GCTracer::ContextDisposalRateInMilliseconds() const { |
631 if (context_disposal_events_.size() < kRingBufferMaxSize) return 0.0; | 648 if (context_disposal_events_.size() < kRingBufferMaxSize) return 0.0; |
632 | 649 |
633 double begin = base::OS::TimeCurrentMillis(); | 650 double begin = base::OS::TimeCurrentMillis(); |
634 double end = 0.0; | 651 double end = 0.0; |
(...skipping 23 matching lines...)
658 | 675 |
659 | 676 |
660 bool GCTracer::SurvivalEventsRecorded() const { | 677 bool GCTracer::SurvivalEventsRecorded() const { |
661 return survival_events_.size() > 0; | 678 return survival_events_.size() > 0; |
662 } | 679 } |
663 | 680 |
664 | 681 |
665 void GCTracer::ResetSurvivalEvents() { survival_events_.reset(); } | 682 void GCTracer::ResetSurvivalEvents() { survival_events_.reset(); } |
666 } | 683 } |
667 } // namespace v8::internal | 684 } // namespace v8::internal |