Chromium Code Reviews
chromiumcodereview-hr@appspot.gserviceaccount.com (chromiumcodereview-hr) | Please choose your nickname with Settings | Help | Chromium Project | Gerrit Changes | Sign out
(168)

Side by Side Diff: src/heap/gc-tracer.cc

Issue 1830723004: Refactor the ring buffer in GCTracer. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 4 years, 8 months ago
Use n/p to move between diff chunks; N/P to move between comments. Draft comments are only viewable by you.
Jump to:
View unified diff | Download patch
OLDNEW
1 // Copyright 2014 the V8 project authors. All rights reserved. 1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be 2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file. 3 // found in the LICENSE file.
4 4
5 #include "src/heap/gc-tracer.h" 5 #include "src/heap/gc-tracer.h"
6 6
7 #include "src/counters.h" 7 #include "src/counters.h"
8 #include "src/heap/heap-inl.h" 8 #include "src/heap/heap-inl.h"
9 #include "src/isolate.h" 9 #include "src/isolate.h"
10 10
(...skipping 27 matching lines...) Expand all
38 DCHECK(scope_ < NUMBER_OF_SCOPES); // scope_ is unsigned. 38 DCHECK(scope_ < NUMBER_OF_SCOPES); // scope_ is unsigned.
39 tracer_->current_.scopes[scope_] += 39 tracer_->current_.scopes[scope_] +=
40 tracer_->heap_->MonotonicallyIncreasingTimeInMs() - start_time_; 40 tracer_->heap_->MonotonicallyIncreasingTimeInMs() - start_time_;
41 // TODO(cbruni): remove once we fully moved to a trace-based system. 41 // TODO(cbruni): remove once we fully moved to a trace-based system.
42 if (FLAG_runtime_call_stats) { 42 if (FLAG_runtime_call_stats) {
43 tracer_->heap_->isolate()->counters()->runtime_call_stats()->Leave(&timer_); 43 tracer_->heap_->isolate()->counters()->runtime_call_stats()->Leave(&timer_);
44 } 44 }
45 } 45 }
46 46
47 47
48 GCTracer::AllocationEvent::AllocationEvent(double duration,
49 size_t allocation_in_bytes) {
50 duration_ = duration;
51 allocation_in_bytes_ = allocation_in_bytes;
52 }
53
54
55 GCTracer::ContextDisposalEvent::ContextDisposalEvent(double time) {
56 time_ = time;
57 }
58
59
60 GCTracer::SurvivalEvent::SurvivalEvent(double promotion_ratio) {
61 promotion_ratio_ = promotion_ratio;
62 }
63
64
65 GCTracer::Event::Event(Type type, const char* gc_reason, 48 GCTracer::Event::Event(Type type, const char* gc_reason,
66 const char* collector_reason) 49 const char* collector_reason)
67 : type(type), 50 : type(type),
68 gc_reason(gc_reason), 51 gc_reason(gc_reason),
69 collector_reason(collector_reason), 52 collector_reason(collector_reason),
70 start_time(0.0), 53 start_time(0.0),
71 end_time(0.0), 54 end_time(0.0),
72 reduce_memory(false), 55 reduce_memory(false),
73 start_object_size(0), 56 start_object_size(0),
74 end_object_size(0), 57 end_object_size(0),
(...skipping 120 matching lines...) Expand 10 before | Expand all | Expand 10 after
195 start_time, used_memory); 178 start_time, used_memory);
196 // TODO(cbruni): remove once we fully moved to a trace-based system. 179 // TODO(cbruni): remove once we fully moved to a trace-based system.
197 if (FLAG_runtime_call_stats) { 180 if (FLAG_runtime_call_stats) {
198 RuntimeCallStats* stats = 181 RuntimeCallStats* stats =
199 heap_->isolate()->counters()->runtime_call_stats(); 182 heap_->isolate()->counters()->runtime_call_stats();
200 timer_.Initialize(&stats->GC, stats->current_timer()); 183 timer_.Initialize(&stats->GC, stats->current_timer());
201 stats->Enter(&timer_); 184 stats->Enter(&timer_);
202 } 185 }
203 } 186 }
204 187
205
206 void GCTracer::Stop(GarbageCollector collector) { 188 void GCTracer::Stop(GarbageCollector collector) {
207 start_counter_--; 189 start_counter_--;
208 if (start_counter_ != 0) { 190 if (start_counter_ != 0) {
209 Output("[Finished reentrant %s during %s.]\n", 191 Output("[Finished reentrant %s during %s.]\n",
210 collector == SCAVENGER ? "Scavenge" : "Mark-sweep", 192 collector == SCAVENGER ? "Scavenge" : "Mark-sweep",
211 current_.TypeName(false)); 193 current_.TypeName(false));
212 return; 194 return;
213 } 195 }
214 196
215 DCHECK(start_counter_ >= 0); 197 DCHECK(start_counter_ >= 0);
(...skipping 10 matching lines...) Expand all
226 208
227 AddAllocation(current_.end_time); 209 AddAllocation(current_.end_time);
228 210
229 int committed_memory = static_cast<int>(heap_->CommittedMemory() / KB); 211 int committed_memory = static_cast<int>(heap_->CommittedMemory() / KB);
230 int used_memory = static_cast<int>(current_.end_object_size / KB); 212 int used_memory = static_cast<int>(current_.end_object_size / KB);
231 heap_->isolate()->counters()->aggregated_memory_heap_committed()->AddSample( 213 heap_->isolate()->counters()->aggregated_memory_heap_committed()->AddSample(
232 current_.end_time, committed_memory); 214 current_.end_time, committed_memory);
233 heap_->isolate()->counters()->aggregated_memory_heap_used()->AddSample( 215 heap_->isolate()->counters()->aggregated_memory_heap_used()->AddSample(
234 current_.end_time, used_memory); 216 current_.end_time, used_memory);
235 217
218 double duration = current_.end_time - current_.start_time;
236 if (current_.type == Event::SCAVENGER) { 219 if (current_.type == Event::SCAVENGER) {
237 current_.incremental_marking_steps = 220 current_.incremental_marking_steps =
238 current_.cumulative_incremental_marking_steps - 221 current_.cumulative_incremental_marking_steps -
239 previous_.cumulative_incremental_marking_steps; 222 previous_.cumulative_incremental_marking_steps;
240 current_.incremental_marking_bytes = 223 current_.incremental_marking_bytes =
241 current_.cumulative_incremental_marking_bytes - 224 current_.cumulative_incremental_marking_bytes -
242 previous_.cumulative_incremental_marking_bytes; 225 previous_.cumulative_incremental_marking_bytes;
243 current_.incremental_marking_duration = 226 current_.incremental_marking_duration =
244 current_.cumulative_incremental_marking_duration - 227 current_.cumulative_incremental_marking_duration -
245 previous_.cumulative_incremental_marking_duration; 228 previous_.cumulative_incremental_marking_duration;
246 current_.pure_incremental_marking_duration = 229 current_.pure_incremental_marking_duration =
247 current_.cumulative_pure_incremental_marking_duration - 230 current_.cumulative_pure_incremental_marking_duration -
248 previous_.cumulative_pure_incremental_marking_duration; 231 previous_.cumulative_pure_incremental_marking_duration;
249 scavenger_events_.push_front(current_); 232 past_scavenges_total_.Push(
233 MakeBytesAndDuration(current_.new_space_object_size, duration));
234 past_scavenges_survived_.Push(MakeBytesAndDuration(
235 current_.survived_new_space_object_size, duration));
250 } else if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) { 236 } else if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
251 current_.incremental_marking_steps = 237 current_.incremental_marking_steps =
252 current_.cumulative_incremental_marking_steps - 238 current_.cumulative_incremental_marking_steps -
253 previous_incremental_mark_compactor_event_ 239 previous_incremental_mark_compactor_event_
254 .cumulative_incremental_marking_steps; 240 .cumulative_incremental_marking_steps;
255 current_.incremental_marking_bytes = 241 current_.incremental_marking_bytes =
256 current_.cumulative_incremental_marking_bytes - 242 current_.cumulative_incremental_marking_bytes -
257 previous_incremental_mark_compactor_event_ 243 previous_incremental_mark_compactor_event_
258 .cumulative_incremental_marking_bytes; 244 .cumulative_incremental_marking_bytes;
259 current_.incremental_marking_duration = 245 current_.incremental_marking_duration =
260 current_.cumulative_incremental_marking_duration - 246 current_.cumulative_incremental_marking_duration -
261 previous_incremental_mark_compactor_event_ 247 previous_incremental_mark_compactor_event_
262 .cumulative_incremental_marking_duration; 248 .cumulative_incremental_marking_duration;
263 current_.pure_incremental_marking_duration = 249 current_.pure_incremental_marking_duration =
264 current_.cumulative_pure_incremental_marking_duration - 250 current_.cumulative_pure_incremental_marking_duration -
265 previous_incremental_mark_compactor_event_ 251 previous_incremental_mark_compactor_event_
266 .cumulative_pure_incremental_marking_duration; 252 .cumulative_pure_incremental_marking_duration;
267 longest_incremental_marking_step_ = 0.0; 253 longest_incremental_marking_step_ = 0.0;
268 incremental_mark_compactor_events_.push_front(current_); 254 past_incremental_marking_steps_.Push(
255 MakeBytesAndDuration(current_.incremental_marking_bytes,
256 current_.pure_incremental_marking_duration));
257 past_incremental_mark_compacts_.Push(
258 MakeBytesAndDuration(current_.start_object_size, duration));
269 combined_mark_compact_speed_cache_ = 0.0; 259 combined_mark_compact_speed_cache_ = 0.0;
270 } else { 260 } else {
271 DCHECK(current_.incremental_marking_bytes == 0); 261 DCHECK(current_.incremental_marking_bytes == 0);
272 DCHECK(current_.incremental_marking_duration == 0); 262 DCHECK(current_.incremental_marking_duration == 0);
273 DCHECK(current_.pure_incremental_marking_duration == 0); 263 DCHECK(current_.pure_incremental_marking_duration == 0);
274 longest_incremental_marking_step_ = 0.0; 264 longest_incremental_marking_step_ = 0.0;
275 mark_compactor_events_.push_front(current_); 265 past_mark_compacts_.Push(
266 MakeBytesAndDuration(current_.start_object_size, duration));
276 combined_mark_compact_speed_cache_ = 0.0; 267 combined_mark_compact_speed_cache_ = 0.0;
277 } 268 }
278 269
279 // TODO(ernstm): move the code below out of GCTracer. 270 // TODO(ernstm): move the code below out of GCTracer.
280 271
281 double duration = current_.end_time - current_.start_time;
282 double spent_in_mutator = Max(current_.start_time - previous_.end_time, 0.0); 272 double spent_in_mutator = Max(current_.start_time - previous_.end_time, 0.0);
283 273
284 heap_->UpdateCumulativeGCStatistics(duration, spent_in_mutator, 274 heap_->UpdateCumulativeGCStatistics(duration, spent_in_mutator,
285 current_.scopes[Scope::MC_MARK]); 275 current_.scopes[Scope::MC_MARK]);
286 276
287 if (current_.type == Event::SCAVENGER && FLAG_trace_gc_ignore_scavenger) 277 if (current_.type == Event::SCAVENGER && FLAG_trace_gc_ignore_scavenger)
288 return; 278 return;
289 279
290 if (FLAG_trace_gc_nvp) 280 if (FLAG_trace_gc_nvp)
291 PrintNVP(); 281 PrintNVP();
(...skipping 36 matching lines...) Expand 10 before | Expand all | Expand 10 after
328 old_generation_allocation_counter_bytes_ = old_generation_counter_bytes; 318 old_generation_allocation_counter_bytes_ = old_generation_counter_bytes;
329 allocation_duration_since_gc_ += duration; 319 allocation_duration_since_gc_ += duration;
330 new_space_allocation_in_bytes_since_gc_ += new_space_allocated_bytes; 320 new_space_allocation_in_bytes_since_gc_ += new_space_allocated_bytes;
331 old_generation_allocation_in_bytes_since_gc_ += 321 old_generation_allocation_in_bytes_since_gc_ +=
332 old_generation_allocated_bytes; 322 old_generation_allocated_bytes;
333 } 323 }
334 324
335 325
336 void GCTracer::AddAllocation(double current_ms) { 326 void GCTracer::AddAllocation(double current_ms) {
337 allocation_time_ms_ = current_ms; 327 allocation_time_ms_ = current_ms;
338 new_space_allocation_events_.push_front(AllocationEvent( 328 past_new_generation_allocations_.Push(MakeBytesAndDuration(
339 allocation_duration_since_gc_, new_space_allocation_in_bytes_since_gc_)); 329 new_space_allocation_in_bytes_since_gc_, allocation_duration_since_gc_));
340 old_generation_allocation_events_.push_front( 330 past_old_generation_allocations_.Push(
341 AllocationEvent(allocation_duration_since_gc_, 331 MakeBytesAndDuration(old_generation_allocation_in_bytes_since_gc_,
342 old_generation_allocation_in_bytes_since_gc_)); 332 allocation_duration_since_gc_));
343 allocation_duration_since_gc_ = 0; 333 allocation_duration_since_gc_ = 0;
344 new_space_allocation_in_bytes_since_gc_ = 0; 334 new_space_allocation_in_bytes_since_gc_ = 0;
345 old_generation_allocation_in_bytes_since_gc_ = 0; 335 old_generation_allocation_in_bytes_since_gc_ = 0;
346 } 336 }
347 337
348 338
349 void GCTracer::AddContextDisposalTime(double time) { 339 void GCTracer::AddContextDisposalTime(double time) {
350 context_disposal_events_.push_front(ContextDisposalEvent(time)); 340 past_context_disposal_times_.Push(time);
351 } 341 }
352 342
353 343
354 void GCTracer::AddCompactionEvent(double duration, 344 void GCTracer::AddCompactionEvent(double duration,
355 intptr_t live_bytes_compacted) { 345 intptr_t live_bytes_compacted) {
356 compaction_events_.push_front( 346 past_compactions_.Push(MakeBytesAndDuration(live_bytes_compacted, duration));
357 CompactionEvent(duration, live_bytes_compacted));
358 } 347 }
359 348
360 349
361 void GCTracer::AddSurvivalRatio(double promotion_ratio) { 350 void GCTracer::AddSurvivalRatio(double promotion_ratio) {
362 survival_events_.push_front(SurvivalEvent(promotion_ratio)); 351 past_survival_ratios_.Push(promotion_ratio);
363 } 352 }
364 353
365 354
366 void GCTracer::AddIncrementalMarkingStep(double duration, intptr_t bytes) { 355 void GCTracer::AddIncrementalMarkingStep(double duration, intptr_t bytes) {
367 cumulative_incremental_marking_steps_++; 356 cumulative_incremental_marking_steps_++;
368 cumulative_incremental_marking_bytes_ += bytes; 357 cumulative_incremental_marking_bytes_ += bytes;
369 cumulative_incremental_marking_duration_ += duration; 358 cumulative_incremental_marking_duration_ += duration;
370 longest_incremental_marking_step_ = 359 longest_incremental_marking_step_ =
371 Max(longest_incremental_marking_step_, duration); 360 Max(longest_incremental_marking_step_, duration);
372 cumulative_marking_duration_ += duration; 361 cumulative_marking_duration_ += duration;
(...skipping 288 matching lines...) Expand 10 before | Expand all | Expand 10 after
661 ContextDisposalRateInMilliseconds(), 650 ContextDisposalRateInMilliseconds(),
662 CompactionSpeedInBytesPerMillisecond()); 651 CompactionSpeedInBytesPerMillisecond());
663 break; 652 break;
664 case Event::START: 653 case Event::START:
665 break; 654 break;
666 default: 655 default:
667 UNREACHABLE(); 656 UNREACHABLE();
668 } 657 }
669 } 658 }
670 659
671 660 int GCTracer::AverageSpeed(const RingBuffer<BytesAndDuration>& buffer,
Hannes Payer (out of office) 2016/03/29 11:37:38 How about returning double here?
ulan 2016/03/29 12:13:07 Yes, I am going to change all functions to return double.
Hannes Payer (out of office) 2016/03/29 12:18:34 Ack, cool!
672 double GCTracer::MeanDuration(const EventBuffer& events) const { 661 const BytesAndDuration& initial, double time_ms) {
673 if (events.empty()) return 0.0; 662 BytesAndDuration sum = buffer.Sum(
674 663 [time_ms](BytesAndDuration a, BytesAndDuration b) {
675 double mean = 0.0; 664 if (time_ms != 0 && a.second >= time_ms) return a;
676 EventBuffer::const_iterator iter = events.begin(); 665 return std::make_pair(a.first + b.first, a.second + b.second);
677 while (iter != events.end()) { 666 },
678 mean += iter->end_time - iter->start_time; 667 initial);
679 ++iter; 668 uint64_t bytes = sum.first;
680 } 669 double durations = sum.second;
681 670 if (durations == 0.0) return 0;
682 return mean / events.size(); 671 double speed = bytes / durations + 0.5;
672 const int max_speed = 1024 * MB;
673 const int min_speed = 1;
674 if (speed >= max_speed) return max_speed;
675 if (speed <= min_speed) return min_speed;
676 return static_cast<int>(speed);
683 } 677 }
684 678
685 679 int GCTracer::AverageSpeed(const RingBuffer<BytesAndDuration>& buffer) {
686 double GCTracer::MaxDuration(const EventBuffer& events) const { 680 return AverageSpeed(buffer, MakeBytesAndDuration(0, 0), 0);
687 if (events.empty()) return 0.0;
688
689 double maximum = 0.0f;
690 EventBuffer::const_iterator iter = events.begin();
691 while (iter != events.end()) {
692 maximum = Max(iter->end_time - iter->start_time, maximum);
693 ++iter;
694 }
695
696 return maximum;
697 } 681 }
698 682
699
700 intptr_t GCTracer::IncrementalMarkingSpeedInBytesPerMillisecond() const { 683 intptr_t GCTracer::IncrementalMarkingSpeedInBytesPerMillisecond() const {
701 if (cumulative_incremental_marking_duration_ == 0.0) return 0; 684 if (cumulative_incremental_marking_duration_ == 0.0) return 0;
702
703 // We haven't completed an entire round of incremental marking, yet. 685 // We haven't completed an entire round of incremental marking, yet.
704 // Use data from GCTracer instead of data from event buffers. 686 // Use data from GCTracer instead of data from event buffers.
705 if (incremental_mark_compactor_events_.empty()) { 687 if (past_incremental_marking_steps_.Count() == 0) {
706 return static_cast<intptr_t>(cumulative_incremental_marking_bytes_ / 688 return static_cast<intptr_t>(cumulative_incremental_marking_bytes_ /
707 cumulative_pure_incremental_marking_duration_); 689 cumulative_pure_incremental_marking_duration_);
708 } 690 }
709 691 return AverageSpeed(past_incremental_marking_steps_);
710 intptr_t bytes = 0;
711 double durations = 0.0;
712 EventBuffer::const_iterator iter = incremental_mark_compactor_events_.begin();
713 while (iter != incremental_mark_compactor_events_.end()) {
714 bytes += iter->incremental_marking_bytes;
715 durations += iter->pure_incremental_marking_duration;
716 ++iter;
717 }
718
719 if (durations == 0.0) return 0;
720 // Make sure the result is at least 1.
721 return Max<size_t>(static_cast<size_t>(bytes / durations + 0.5), 1);
722 } 692 }
723 693
724
725 intptr_t GCTracer::ScavengeSpeedInBytesPerMillisecond( 694 intptr_t GCTracer::ScavengeSpeedInBytesPerMillisecond(
726 ScavengeSpeedMode mode) const { 695 ScavengeSpeedMode mode) const {
727 intptr_t bytes = 0; 696 if (mode == kForAllObjects) {
728 double durations = 0.0; 697 return AverageSpeed(past_scavenges_total_);
729 EventBuffer::const_iterator iter = scavenger_events_.begin(); 698 } else {
730 while (iter != scavenger_events_.end()) { 699 return AverageSpeed(past_scavenges_survived_);
731 bytes += mode == kForAllObjects ? iter->new_space_object_size
732 : iter->survived_new_space_object_size;
733 durations += iter->end_time - iter->start_time;
734 ++iter;
735 } 700 }
736
737 if (durations == 0.0) return 0;
738 // Make sure the result is at least 1.
739 return Max<size_t>(static_cast<size_t>(bytes / durations + 0.5), 1);
740 } 701 }
741 702
742
743 intptr_t GCTracer::CompactionSpeedInBytesPerMillisecond() const { 703 intptr_t GCTracer::CompactionSpeedInBytesPerMillisecond() const {
744 if (compaction_events_.size() == 0) return 0; 704 return AverageSpeed(past_compactions_);
745 intptr_t bytes = 0;
746 double durations = 0.0;
747 CompactionEventBuffer::const_iterator iter = compaction_events_.begin();
748 while (iter != compaction_events_.end()) {
749 bytes += iter->live_bytes_compacted;
750 durations += iter->duration;
751 ++iter;
752 }
753
754 if (durations == 0.0) return 0;
755 // Make sure the result is at least 1.
756 return Max<intptr_t>(static_cast<intptr_t>(bytes / durations + 0.5), 1);
757 } 705 }
758 706
759
760 intptr_t GCTracer::MarkCompactSpeedInBytesPerMillisecond() const { 707 intptr_t GCTracer::MarkCompactSpeedInBytesPerMillisecond() const {
761 intptr_t bytes = 0; 708 return AverageSpeed(past_mark_compacts_);
762 double durations = 0.0;
763 EventBuffer::const_iterator iter = mark_compactor_events_.begin();
764 while (iter != mark_compactor_events_.end()) {
765 bytes += iter->start_object_size;
766 durations += iter->end_time - iter->start_time;
767 ++iter;
768 }
769
770 if (durations == 0.0) return 0;
771 // Make sure the result is at least 1.
772 return Max<size_t>(static_cast<size_t>(bytes / durations + 0.5), 1);
773 } 709 }
774 710
775
776 intptr_t GCTracer::FinalIncrementalMarkCompactSpeedInBytesPerMillisecond() 711 intptr_t GCTracer::FinalIncrementalMarkCompactSpeedInBytesPerMillisecond()
777 const { 712 const {
778 intptr_t bytes = 0; 713 return AverageSpeed(past_incremental_mark_compacts_);
779 double durations = 0.0;
780 EventBuffer::const_iterator iter = incremental_mark_compactor_events_.begin();
781 while (iter != incremental_mark_compactor_events_.end()) {
782 bytes += iter->start_object_size;
783 durations += iter->end_time - iter->start_time;
784 ++iter;
785 }
786
787 if (durations == 0.0) return 0;
788 // Make sure the result is at least 1.
789 return Max<size_t>(static_cast<size_t>(bytes / durations + 0.5), 1);
790 } 714 }
791 715
792
793 double GCTracer::CombinedMarkCompactSpeedInBytesPerMillisecond() { 716 double GCTracer::CombinedMarkCompactSpeedInBytesPerMillisecond() {
794 if (combined_mark_compact_speed_cache_ > 0) 717 if (combined_mark_compact_speed_cache_ > 0)
795 return combined_mark_compact_speed_cache_; 718 return combined_mark_compact_speed_cache_;
796 const double kMinimumMarkingSpeed = 0.5; 719 const double kMinimumMarkingSpeed = 0.5;
797 double speed1 = 720 double speed1 =
798 static_cast<double>(IncrementalMarkingSpeedInBytesPerMillisecond()); 721 static_cast<double>(IncrementalMarkingSpeedInBytesPerMillisecond());
799 double speed2 = static_cast<double>( 722 double speed2 = static_cast<double>(
800 FinalIncrementalMarkCompactSpeedInBytesPerMillisecond()); 723 FinalIncrementalMarkCompactSpeedInBytesPerMillisecond());
801 if (speed1 < kMinimumMarkingSpeed || speed2 < kMinimumMarkingSpeed) { 724 if (speed1 < kMinimumMarkingSpeed || speed2 < kMinimumMarkingSpeed) {
802 // No data for the incremental marking speed. 725 // No data for the incremental marking speed.
803 // Return the non-incremental mark-compact speed. 726 // Return the non-incremental mark-compact speed.
804 combined_mark_compact_speed_cache_ = 727 combined_mark_compact_speed_cache_ =
805 static_cast<double>(MarkCompactSpeedInBytesPerMillisecond()); 728 static_cast<double>(MarkCompactSpeedInBytesPerMillisecond());
806 } else { 729 } else {
807 // Combine the speed of incremental step and the speed of the final step. 730 // Combine the speed of incremental step and the speed of the final step.
808 // 1 / (1 / speed1 + 1 / speed2) = speed1 * speed2 / (speed1 + speed2). 731 // 1 / (1 / speed1 + 1 / speed2) = speed1 * speed2 / (speed1 + speed2).
809 combined_mark_compact_speed_cache_ = speed1 * speed2 / (speed1 + speed2); 732 combined_mark_compact_speed_cache_ = speed1 * speed2 / (speed1 + speed2);
810 } 733 }
811 return combined_mark_compact_speed_cache_; 734 return combined_mark_compact_speed_cache_;
812 } 735 }
813 736
814
815 size_t GCTracer::NewSpaceAllocationThroughputInBytesPerMillisecond( 737 size_t GCTracer::NewSpaceAllocationThroughputInBytesPerMillisecond(
816 double time_ms) const { 738 double time_ms) const {
817 size_t bytes = new_space_allocation_in_bytes_since_gc_; 739 size_t bytes = new_space_allocation_in_bytes_since_gc_;
818 double durations = allocation_duration_since_gc_; 740 double durations = allocation_duration_since_gc_;
819 AllocationEventBuffer::const_iterator iter = 741 return static_cast<size_t>(
820 new_space_allocation_events_.begin(); 742 AverageSpeed(past_new_generation_allocations_,
821 const size_t max_bytes = static_cast<size_t>(-1); 743 MakeBytesAndDuration(bytes, durations), time_ms));
822 while (iter != new_space_allocation_events_.end() &&
823 bytes < max_bytes - bytes && (time_ms == 0 || durations < time_ms)) {
824 bytes += iter->allocation_in_bytes_;
825 durations += iter->duration_;
826 ++iter;
827 }
828
829 if (durations == 0.0) return 0;
830 // Make sure the result is at least 1.
831 return Max<size_t>(static_cast<size_t>(bytes / durations + 0.5), 1);
832 } 744 }
833 745
834
835 size_t GCTracer::OldGenerationAllocationThroughputInBytesPerMillisecond( 746 size_t GCTracer::OldGenerationAllocationThroughputInBytesPerMillisecond(
836 double time_ms) const { 747 double time_ms) const {
837 size_t bytes = old_generation_allocation_in_bytes_since_gc_; 748 size_t bytes = old_generation_allocation_in_bytes_since_gc_;
838 double durations = allocation_duration_since_gc_; 749 double durations = allocation_duration_since_gc_;
839 AllocationEventBuffer::const_iterator iter = 750 return static_cast<size_t>(
840 old_generation_allocation_events_.begin(); 751 AverageSpeed(past_old_generation_allocations_,
841 const size_t max_bytes = static_cast<size_t>(-1); 752 MakeBytesAndDuration(bytes, durations), time_ms));
842 while (iter != old_generation_allocation_events_.end() &&
843 bytes < max_bytes - bytes && (time_ms == 0 || durations < time_ms)) {
844 bytes += iter->allocation_in_bytes_;
845 durations += iter->duration_;
846 ++iter;
847 }
848
849 if (durations == 0.0) return 0;
850 // Make sure the result is at least 1.
851 return Max<size_t>(static_cast<size_t>(bytes / durations + 0.5), 1);
852 } 753 }
853 754
854
855 size_t GCTracer::AllocationThroughputInBytesPerMillisecond( 755 size_t GCTracer::AllocationThroughputInBytesPerMillisecond(
856 double time_ms) const { 756 double time_ms) const {
857 return NewSpaceAllocationThroughputInBytesPerMillisecond(time_ms) + 757 return NewSpaceAllocationThroughputInBytesPerMillisecond(time_ms) +
858 OldGenerationAllocationThroughputInBytesPerMillisecond(time_ms); 758 OldGenerationAllocationThroughputInBytesPerMillisecond(time_ms);
859 } 759 }
860 760
861 761
862 size_t GCTracer::CurrentAllocationThroughputInBytesPerMillisecond() const { 762 size_t GCTracer::CurrentAllocationThroughputInBytesPerMillisecond() const {
863 return AllocationThroughputInBytesPerMillisecond(kThroughputTimeFrameMs); 763 return AllocationThroughputInBytesPerMillisecond(kThroughputTimeFrameMs);
864 } 764 }
865 765
866 766
867 size_t GCTracer::CurrentOldGenerationAllocationThroughputInBytesPerMillisecond() 767 size_t GCTracer::CurrentOldGenerationAllocationThroughputInBytesPerMillisecond()
868 const { 768 const {
869 return OldGenerationAllocationThroughputInBytesPerMillisecond( 769 return OldGenerationAllocationThroughputInBytesPerMillisecond(
870 kThroughputTimeFrameMs); 770 kThroughputTimeFrameMs);
871 } 771 }
872 772
873
874 double GCTracer::ContextDisposalRateInMilliseconds() const { 773 double GCTracer::ContextDisposalRateInMilliseconds() const {
875 if (context_disposal_events_.size() < kRingBufferMaxSize) return 0.0; 774 if (past_context_disposal_times_.Count() < past_context_disposal_times_.kSize)
876 775 return 0.0;
877 double begin = heap_->MonotonicallyIncreasingTimeInMs(); 776 double begin = heap_->MonotonicallyIncreasingTimeInMs();
878 double end = 0.0; 777 double end = past_context_disposal_times_.Sum(
879 ContextDisposalEventBuffer::const_iterator iter = 778 [](double a, double b) { return b; }, 0.0);
880 context_disposal_events_.begin(); 779 return (begin - end) / past_context_disposal_times_.Count();
881 while (iter != context_disposal_events_.end()) {
882 end = iter->time_;
883 ++iter;
884 }
885
886 return (begin - end) / context_disposal_events_.size();
887 } 780 }
888 781
889
890 double GCTracer::AverageSurvivalRatio() const { 782 double GCTracer::AverageSurvivalRatio() const {
891 if (survival_events_.size() == 0) return 0.0; 783 if (past_survival_ratios_.Count() == 0) return 0.0;
892 784 double sum =
893 double sum_of_rates = 0.0; 785 past_survival_ratios_.Sum([](double a, double b) { return a + b; }, 0.0);
894 SurvivalEventBuffer::const_iterator iter = survival_events_.begin(); 786 return sum / past_survival_ratios_.Count();
895 while (iter != survival_events_.end()) {
896 sum_of_rates += iter->promotion_ratio_;
897 ++iter;
898 }
899
900 return sum_of_rates / static_cast<double>(survival_events_.size());
901 } 787 }
902 788
903
904 bool GCTracer::SurvivalEventsRecorded() const { 789 bool GCTracer::SurvivalEventsRecorded() const {
905 return survival_events_.size() > 0; 790 return past_survival_ratios_.Count() > 0;
906 } 791 }
907 792
908 793 void GCTracer::ResetSurvivalEvents() { past_survival_ratios_.Reset(); }
909 void GCTracer::ResetSurvivalEvents() { survival_events_.reset(); }
910 } // namespace internal 794 } // namespace internal
911 } // namespace v8 795 } // namespace v8
OLDNEW
« src/heap/gc-tracer.h ('K') | « src/heap/gc-tracer.h ('k') | test/cctest/cctest.gyp » ('j') | no next file with comments »

Powered by Google App Engine
This is Rietveld 408576698