Chromium Code Reviews

Unified Diff: src/heap/gc-tracer.cc

Issue 1830723004: Refactor the ring buffer in GCTracer. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Fix cast (created 4 years, 8 months ago)
 // Copyright 2014 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/heap/gc-tracer.h"

 #include "src/counters.h"
 #include "src/heap/heap-inl.h"
 #include "src/isolate.h"

(...skipping 27 matching lines...)
   DCHECK(scope_ < NUMBER_OF_SCOPES);  // scope_ is unsigned.
   tracer_->current_.scopes[scope_] +=
       tracer_->heap_->MonotonicallyIncreasingTimeInMs() - start_time_;
   // TODO(cbruni): remove once we fully moved to a trace-based system.
   if (FLAG_runtime_call_stats) {
     tracer_->heap_->isolate()->counters()->runtime_call_stats()->Leave(&timer_);
   }
 }


-GCTracer::AllocationEvent::AllocationEvent(double duration,
-                                           size_t allocation_in_bytes) {
-  duration_ = duration;
-  allocation_in_bytes_ = allocation_in_bytes;
-}
-
-
-GCTracer::ContextDisposalEvent::ContextDisposalEvent(double time) {
-  time_ = time;
-}
-
-
-GCTracer::SurvivalEvent::SurvivalEvent(double promotion_ratio) {
-  promotion_ratio_ = promotion_ratio;
-}
-
-
 GCTracer::Event::Event(Type type, const char* gc_reason,
                        const char* collector_reason)
     : type(type),
       gc_reason(gc_reason),
       collector_reason(collector_reason),
       start_time(0.0),
       end_time(0.0),
       reduce_memory(false),
       start_object_size(0),
       end_object_size(0),
(...skipping 120 matching lines...)
       start_time, used_memory);
   // TODO(cbruni): remove once we fully moved to a trace-based system.
   if (FLAG_runtime_call_stats) {
     RuntimeCallStats* stats =
         heap_->isolate()->counters()->runtime_call_stats();
     timer_.Initialize(&stats->GC, stats->current_timer());
     stats->Enter(&timer_);
   }
 }

-
 void GCTracer::Stop(GarbageCollector collector) {
   start_counter_--;
   if (start_counter_ != 0) {
     Output("[Finished reentrant %s during %s.]\n",
            collector == SCAVENGER ? "Scavenge" : "Mark-sweep",
            current_.TypeName(false));
     return;
   }

   DCHECK(start_counter_ >= 0);
(...skipping 10 matching lines...)

   AddAllocation(current_.end_time);

   int committed_memory = static_cast<int>(heap_->CommittedMemory() / KB);
   int used_memory = static_cast<int>(current_.end_object_size / KB);
   heap_->isolate()->counters()->aggregated_memory_heap_committed()->AddSample(
       current_.end_time, committed_memory);
   heap_->isolate()->counters()->aggregated_memory_heap_used()->AddSample(
       current_.end_time, used_memory);

+  double duration = current_.end_time - current_.start_time;
   if (current_.type == Event::SCAVENGER) {
     current_.incremental_marking_steps =
         current_.cumulative_incremental_marking_steps -
         previous_.cumulative_incremental_marking_steps;
     current_.incremental_marking_bytes =
         current_.cumulative_incremental_marking_bytes -
         previous_.cumulative_incremental_marking_bytes;
     current_.incremental_marking_duration =
         current_.cumulative_incremental_marking_duration -
         previous_.cumulative_incremental_marking_duration;
     current_.pure_incremental_marking_duration =
         current_.cumulative_pure_incremental_marking_duration -
         previous_.cumulative_pure_incremental_marking_duration;
-    scavenger_events_.push_front(current_);
+    recorded_scavenges_total_.Push(
+        MakeBytesAndDuration(current_.new_space_object_size, duration));
+    recorded_scavenges_survived_.Push(MakeBytesAndDuration(
+        current_.survived_new_space_object_size, duration));
   } else if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
     current_.incremental_marking_steps =
         current_.cumulative_incremental_marking_steps -
         previous_incremental_mark_compactor_event_
             .cumulative_incremental_marking_steps;
     current_.incremental_marking_bytes =
         current_.cumulative_incremental_marking_bytes -
         previous_incremental_mark_compactor_event_
             .cumulative_incremental_marking_bytes;
     current_.incremental_marking_duration =
         current_.cumulative_incremental_marking_duration -
         previous_incremental_mark_compactor_event_
             .cumulative_incremental_marking_duration;
     current_.pure_incremental_marking_duration =
         current_.cumulative_pure_incremental_marking_duration -
         previous_incremental_mark_compactor_event_
             .cumulative_pure_incremental_marking_duration;
     longest_incremental_marking_step_ = 0.0;
-    incremental_mark_compactor_events_.push_front(current_);
+    recorded_incremental_marking_steps_.Push(
+        MakeBytesAndDuration(current_.incremental_marking_bytes,
+                             current_.pure_incremental_marking_duration));
+    recorded_incremental_mark_compacts_.Push(
+        MakeBytesAndDuration(current_.start_object_size, duration));
     combined_mark_compact_speed_cache_ = 0.0;
   } else {
     DCHECK(current_.incremental_marking_bytes == 0);
     DCHECK(current_.incremental_marking_duration == 0);
     DCHECK(current_.pure_incremental_marking_duration == 0);
     longest_incremental_marking_step_ = 0.0;
-    mark_compactor_events_.push_front(current_);
+    recorded_mark_compacts_.Push(
+        MakeBytesAndDuration(current_.start_object_size, duration));
     combined_mark_compact_speed_cache_ = 0.0;
   }

   // TODO(ernstm): move the code below out of GCTracer.

-  double duration = current_.end_time - current_.start_time;
   double spent_in_mutator = Max(current_.start_time - previous_.end_time, 0.0);

   heap_->UpdateCumulativeGCStatistics(duration, spent_in_mutator,
                                       current_.scopes[Scope::MC_MARK]);

   if (current_.type == Event::SCAVENGER && FLAG_trace_gc_ignore_scavenger)
     return;

   if (FLAG_trace_gc_nvp)
     PrintNVP();
(...skipping 37 matching lines...)
   allocation_duration_since_gc_ += duration;
   new_space_allocation_in_bytes_since_gc_ += new_space_allocated_bytes;
   old_generation_allocation_in_bytes_since_gc_ +=
       old_generation_allocated_bytes;
 }


 void GCTracer::AddAllocation(double current_ms) {
   allocation_time_ms_ = current_ms;
   if (allocation_duration_since_gc_ > 0) {
-    new_space_allocation_events_.push_front(
-        AllocationEvent(allocation_duration_since_gc_,
-                        new_space_allocation_in_bytes_since_gc_));
-    old_generation_allocation_events_.push_front(
-        AllocationEvent(allocation_duration_since_gc_,
-                        old_generation_allocation_in_bytes_since_gc_));
+    recorded_new_generation_allocations_.Push(
+        MakeBytesAndDuration(new_space_allocation_in_bytes_since_gc_,
+                             allocation_duration_since_gc_));
+    recorded_old_generation_allocations_.Push(
+        MakeBytesAndDuration(old_generation_allocation_in_bytes_since_gc_,
+                             allocation_duration_since_gc_));
   }
   allocation_duration_since_gc_ = 0;
   new_space_allocation_in_bytes_since_gc_ = 0;
   old_generation_allocation_in_bytes_since_gc_ = 0;
 }


 void GCTracer::AddContextDisposalTime(double time) {
-  context_disposal_events_.push_front(ContextDisposalEvent(time));
+  recorded_context_disposal_times_.Push(time);
 }


 void GCTracer::AddCompactionEvent(double duration,
                                   intptr_t live_bytes_compacted) {
-  compaction_events_.push_front(
-      CompactionEvent(duration, live_bytes_compacted));
+  recorded_compactions_.Push(
+      MakeBytesAndDuration(live_bytes_compacted, duration));
 }


 void GCTracer::AddSurvivalRatio(double promotion_ratio) {
-  survival_events_.push_front(SurvivalEvent(promotion_ratio));
+  recorded_survival_ratios_.Push(promotion_ratio);
 }


 void GCTracer::AddIncrementalMarkingStep(double duration, intptr_t bytes) {
   cumulative_incremental_marking_steps_++;
   cumulative_incremental_marking_bytes_ += bytes;
   cumulative_incremental_marking_duration_ += duration;
   longest_incremental_marking_step_ =
       Max(longest_incremental_marking_step_, duration);
   cumulative_marking_duration_ += duration;
(...skipping 286 matching lines...)
           ContextDisposalRateInMilliseconds(),
           CompactionSpeedInBytesPerMillisecond());
       break;
     case Event::START:
       break;
     default:
       UNREACHABLE();
   }
 }

-
-double GCTracer::MeanDuration(const EventBuffer& events) const {
-  if (events.empty()) return 0.0;
-
-  double mean = 0.0;
-  EventBuffer::const_iterator iter = events.begin();
-  while (iter != events.end()) {
-    mean += iter->end_time - iter->start_time;
-    ++iter;
-  }
-
-  return mean / events.size();
+int GCTracer::AverageSpeed(const RingBuffer<BytesAndDuration>& buffer,
+                           const BytesAndDuration& initial, double time_ms) {
+  BytesAndDuration sum = buffer.Sum(
+      [time_ms](BytesAndDuration a, BytesAndDuration b) {
+        if (time_ms != 0 && a.second >= time_ms) return a;
+        return std::make_pair(a.first + b.first, a.second + b.second);
+      },
+      initial);
+  uint64_t bytes = sum.first;
+  double durations = sum.second;
+  if (durations == 0.0) return 0;
+  double speed = bytes / durations + 0.5;
+  const int max_speed = 1024 * MB;
+  const int min_speed = 1;
+  if (speed >= max_speed) return max_speed;
+  if (speed <= min_speed) return min_speed;
+  return static_cast<int>(speed);
 }

-
-double GCTracer::MaxDuration(const EventBuffer& events) const {
-  if (events.empty()) return 0.0;
-
-  double maximum = 0.0f;
-  EventBuffer::const_iterator iter = events.begin();
-  while (iter != events.end()) {
-    maximum = Max(iter->end_time - iter->start_time, maximum);
-    ++iter;
-  }
-
-  return maximum;
+int GCTracer::AverageSpeed(const RingBuffer<BytesAndDuration>& buffer) {
+  return AverageSpeed(buffer, MakeBytesAndDuration(0, 0), 0);
 }

-
 intptr_t GCTracer::IncrementalMarkingSpeedInBytesPerMillisecond() const {
   if (cumulative_incremental_marking_duration_ == 0.0) return 0;
-
   // We haven't completed an entire round of incremental marking, yet.
   // Use data from GCTracer instead of data from event buffers.
-  if (incremental_mark_compactor_events_.empty()) {
+  if (recorded_incremental_marking_steps_.Count() == 0) {
     return static_cast<intptr_t>(cumulative_incremental_marking_bytes_ /
                                  cumulative_pure_incremental_marking_duration_);
   }
-
-  intptr_t bytes = 0;
-  double durations = 0.0;
-  EventBuffer::const_iterator iter = incremental_mark_compactor_events_.begin();
-  while (iter != incremental_mark_compactor_events_.end()) {
-    bytes += iter->incremental_marking_bytes;
-    durations += iter->pure_incremental_marking_duration;
-    ++iter;
-  }
-
-  if (durations == 0.0) return 0;
-  // Make sure the result is at least 1.
-  return Max<size_t>(static_cast<size_t>(bytes / durations + 0.5), 1);
+  return AverageSpeed(recorded_incremental_marking_steps_);
 }

-
 intptr_t GCTracer::ScavengeSpeedInBytesPerMillisecond(
     ScavengeSpeedMode mode) const {
-  intptr_t bytes = 0;
-  double durations = 0.0;
-  EventBuffer::const_iterator iter = scavenger_events_.begin();
-  while (iter != scavenger_events_.end()) {
-    bytes += mode == kForAllObjects ? iter->new_space_object_size
-                                    : iter->survived_new_space_object_size;
-    durations += iter->end_time - iter->start_time;
-    ++iter;
+  if (mode == kForAllObjects) {
+    return AverageSpeed(recorded_scavenges_total_);
+  } else {
+    return AverageSpeed(recorded_scavenges_survived_);
   }
-
-  if (durations == 0.0) return 0;
-  // Make sure the result is at least 1.
-  return Max<size_t>(static_cast<size_t>(bytes / durations + 0.5), 1);
 }

-
 intptr_t GCTracer::CompactionSpeedInBytesPerMillisecond() const {
-  if (compaction_events_.size() == 0) return 0;
-  intptr_t bytes = 0;
-  double durations = 0.0;
-  CompactionEventBuffer::const_iterator iter = compaction_events_.begin();
-  while (iter != compaction_events_.end()) {
-    bytes += iter->live_bytes_compacted;
-    durations += iter->duration;
-    ++iter;
-  }
-
-  if (durations == 0.0) return 0;
-  // Make sure the result is at least 1.
-  return Max<intptr_t>(static_cast<intptr_t>(bytes / durations + 0.5), 1);
+  return AverageSpeed(recorded_compactions_);
 }

-
 intptr_t GCTracer::MarkCompactSpeedInBytesPerMillisecond() const {
-  intptr_t bytes = 0;
-  double durations = 0.0;
-  EventBuffer::const_iterator iter = mark_compactor_events_.begin();
-  while (iter != mark_compactor_events_.end()) {
-    bytes += iter->start_object_size;
-    durations += iter->end_time - iter->start_time;
-    ++iter;
-  }
-
-  if (durations == 0.0) return 0;
-  // Make sure the result is at least 1.
-  return Max<size_t>(static_cast<size_t>(bytes / durations + 0.5), 1);
+  return AverageSpeed(recorded_mark_compacts_);
 }

-
 intptr_t GCTracer::FinalIncrementalMarkCompactSpeedInBytesPerMillisecond()
     const {
-  intptr_t bytes = 0;
-  double durations = 0.0;
-  EventBuffer::const_iterator iter = incremental_mark_compactor_events_.begin();
-  while (iter != incremental_mark_compactor_events_.end()) {
-    bytes += iter->start_object_size;
-    durations += iter->end_time - iter->start_time;
-    ++iter;
-  }
-
-  if (durations == 0.0) return 0;
-  // Make sure the result is at least 1.
-  return Max<size_t>(static_cast<size_t>(bytes / durations + 0.5), 1);
+  return AverageSpeed(recorded_incremental_mark_compacts_);
 }

-
 double GCTracer::CombinedMarkCompactSpeedInBytesPerMillisecond() {
   if (combined_mark_compact_speed_cache_ > 0)
     return combined_mark_compact_speed_cache_;
   const double kMinimumMarkingSpeed = 0.5;
   double speed1 =
       static_cast<double>(IncrementalMarkingSpeedInBytesPerMillisecond());
   double speed2 = static_cast<double>(
       FinalIncrementalMarkCompactSpeedInBytesPerMillisecond());
   if (speed1 < kMinimumMarkingSpeed || speed2 < kMinimumMarkingSpeed) {
     // No data for the incremental marking speed.
     // Return the non-incremental mark-compact speed.
     combined_mark_compact_speed_cache_ =
         static_cast<double>(MarkCompactSpeedInBytesPerMillisecond());
   } else {
     // Combine the speed of incremental step and the speed of the final step.
     // 1 / (1 / speed1 + 1 / speed2) = speed1 * speed2 / (speed1 + speed2).
     combined_mark_compact_speed_cache_ = speed1 * speed2 / (speed1 + speed2);
   }
   return combined_mark_compact_speed_cache_;
 }

 double GCTracer::NewSpaceAllocationThroughputInBytesPerMillisecond(
     double time_ms) const {
   size_t bytes = new_space_allocation_in_bytes_since_gc_;
   double durations = allocation_duration_since_gc_;
-  AllocationEventBuffer::const_iterator iter =
-      new_space_allocation_events_.begin();
-  const size_t max_bytes = static_cast<size_t>(-1);
-  while (iter != new_space_allocation_events_.end() &&
-         bytes < max_bytes - bytes && (time_ms == 0 || durations < time_ms)) {
-    bytes += iter->allocation_in_bytes_;
-    durations += iter->duration_;
-    ++iter;
-  }
-
-  if (durations == 0.0) return 0;
-
-  // Make sure the result is at least 1.
-  return Max<double>(bytes / durations, 1);
+  return AverageSpeed(recorded_new_generation_allocations_,
+                      MakeBytesAndDuration(bytes, durations), time_ms);
 }

 double GCTracer::OldGenerationAllocationThroughputInBytesPerMillisecond(
     double time_ms) const {
   size_t bytes = old_generation_allocation_in_bytes_since_gc_;
   double durations = allocation_duration_since_gc_;
-  AllocationEventBuffer::const_iterator iter =
-      old_generation_allocation_events_.begin();
-  const size_t max_bytes = static_cast<size_t>(-1);
-  while (iter != old_generation_allocation_events_.end() &&
-         bytes < max_bytes - bytes && (time_ms == 0 || durations < time_ms)) {
-    bytes += iter->allocation_in_bytes_;
-    durations += iter->duration_;
-    ++iter;
-  }
-
-  if (durations == 0.0) return 0;
-  // Make sure the result is at least 1.
-  return Max<double>(bytes / durations, 1);
+  return AverageSpeed(recorded_old_generation_allocations_,
+                      MakeBytesAndDuration(bytes, durations), time_ms);
 }

 double GCTracer::AllocationThroughputInBytesPerMillisecond(
     double time_ms) const {
   return NewSpaceAllocationThroughputInBytesPerMillisecond(time_ms) +
          OldGenerationAllocationThroughputInBytesPerMillisecond(time_ms);
 }


 size_t GCTracer::CurrentAllocationThroughputInBytesPerMillisecond() const {
   return AllocationThroughputInBytesPerMillisecond(kThroughputTimeFrameMs);
 }


 size_t GCTracer::CurrentOldGenerationAllocationThroughputInBytesPerMillisecond()
     const {
   return OldGenerationAllocationThroughputInBytesPerMillisecond(
       kThroughputTimeFrameMs);
 }

-
 double GCTracer::ContextDisposalRateInMilliseconds() const {
-  if (context_disposal_events_.size() < kRingBufferMaxSize) return 0.0;
-
+  if (recorded_context_disposal_times_.Count() <
+      recorded_context_disposal_times_.kSize)
+    return 0.0;
   double begin = heap_->MonotonicallyIncreasingTimeInMs();
-  double end = 0.0;
-  ContextDisposalEventBuffer::const_iterator iter =
-      context_disposal_events_.begin();
-  while (iter != context_disposal_events_.end()) {
-    end = iter->time_;
-    ++iter;
-  }
-
-  return (begin - end) / context_disposal_events_.size();
+  double end = recorded_context_disposal_times_.Sum(
+      [](double a, double b) { return b; }, 0.0);
+  return (begin - end) / recorded_context_disposal_times_.Count();
 }

-
 double GCTracer::AverageSurvivalRatio() const {
-  if (survival_events_.size() == 0) return 0.0;
-
-  double sum_of_rates = 0.0;
-  SurvivalEventBuffer::const_iterator iter = survival_events_.begin();
-  while (iter != survival_events_.end()) {
-    sum_of_rates += iter->promotion_ratio_;
-    ++iter;
-  }
-
-  return sum_of_rates / static_cast<double>(survival_events_.size());
+  if (recorded_survival_ratios_.Count() == 0) return 0.0;
+  double sum = recorded_survival_ratios_.Sum(
+      [](double a, double b) { return a + b; }, 0.0);
+  return sum / recorded_survival_ratios_.Count();
 }

-
 bool GCTracer::SurvivalEventsRecorded() const {
-  return survival_events_.size() > 0;
+  return recorded_survival_ratios_.Count() > 0;
 }

-
-void GCTracer::ResetSurvivalEvents() { survival_events_.reset(); }
+void GCTracer::ResetSurvivalEvents() { recorded_survival_ratios_.Reset(); }
 }  // namespace internal
 }  // namespace v8
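
Note for readers reviewing this file in isolation: the new call sites above assume a small fixed-capacity RingBuffer<T> template (Push, Count, Sum, Reset, kSize) plus a MakeBytesAndDuration helper declared in src/heap/gc-tracer.h, which is not shown in this diff. The sketch below is only an illustration of that assumed interface, not the actual header; the capacity of 10 is a guess, and the Sum fold is written to run from the most recently pushed entry backwards so that the time_ms cutoff in AverageSpeed and the oldest-element lookup in ContextDisposalRateInMilliseconds behave the way the call sites expect.

// Illustrative sketch only -- the real definitions live in src/heap/gc-tracer.h.
#include <cstdint>
#include <utility>

typedef std::pair<uint64_t, double> BytesAndDuration;

inline BytesAndDuration MakeBytesAndDuration(uint64_t bytes, double duration) {
  return std::make_pair(bytes, duration);
}

template <typename T>
class RingBuffer {
 public:
  static const int kSize = 10;  // assumed capacity; the header defines the real value

  RingBuffer() : start_(0), count_(0) {}

  // Appends a value, overwriting the oldest element once the buffer is full.
  void Push(const T& value) {
    if (count_ == kSize) {
      elements_[start_++] = value;
      if (start_ == kSize) start_ = 0;
    } else {
      elements_[count_++] = value;
    }
  }

  int Count() const { return count_; }

  // Folds over the stored elements from newest to oldest, so a callback can
  // stop accumulating (by returning its first argument unchanged) once it has
  // seen enough recent history.
  template <typename Callback>
  T Sum(Callback callback, const T& initial) const {
    int j = start_ + count_ - 1;
    if (j >= kSize) j -= kSize;
    T result = initial;
    for (int i = 0; i < count_; i++) {
      result = callback(result, elements_[j]);
      if (--j == -1) j += kSize;
    }
    return result;
  }

  void Reset() { start_ = count_ = 0; }

 private:
  T elements_[kSize];
  int start_;
  int count_;
};

With a buffer like this, AverageSpeed sums (bytes, duration) pairs over at most the last kSize recorded events, optionally stopping once time_ms of history has been covered, and clamps the resulting speed to the range [1, 1024 * MB] bytes per millisecond, which matches the behaviour of the per-buffer loops it replaces.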