Chromium Code Reviews

Unified Diff: src/heap/gc-tracer.cc

Issue 1125193005: Make new space allocation throughput estimation more accurate. (Closed) Base URL: https://chromium.googlesource.com/v8/v8.git@master
Patch Set: Created 5 years, 7 months ago
 // Copyright 2014 the V8 project authors. All rights reserved.
 // Use of this source code is governed by a BSD-style license that can be
 // found in the LICENSE file.

 #include "src/v8.h"

 #include "src/heap/gc-tracer.h"

 namespace v8 {
 namespace internal {

 static intptr_t CountTotalHolesSize(Heap* heap) {
   intptr_t holes_size = 0;
   OldSpaces spaces(heap);
   for (OldSpace* space = spaces.next(); space != NULL; space = spaces.next()) {
     holes_size += space->Waste() + space->Available();
   }
   return holes_size;
 }


 GCTracer::AllocationEvent::AllocationEvent(double duration,
-                                           intptr_t allocation_in_bytes) {
+                                           size_t allocation_in_bytes) {
   duration_ = duration;
   allocation_in_bytes_ = allocation_in_bytes;
 }


 GCTracer::ContextDisposalEvent::ContextDisposalEvent(double time) {
   time_ = time;
 }

(...skipping 58 matching lines...)

 GCTracer::GCTracer(Heap* heap)
     : heap_(heap),
       cumulative_incremental_marking_steps_(0),
       cumulative_incremental_marking_bytes_(0),
       cumulative_incremental_marking_duration_(0.0),
       cumulative_pure_incremental_marking_duration_(0.0),
       longest_incremental_marking_step_(0.0),
       cumulative_marking_duration_(0.0),
       cumulative_sweeping_duration_(0.0),
-      new_space_top_after_gc_(0),
+      new_space_allocation_time_ms_(0.0),
+      new_space_allocation_counter_bytes_(0),
       start_counter_(0) {
   current_ = Event(Event::START, NULL, NULL);
   current_.end_time = base::OS::TimeCurrentMillis();
   previous_ = previous_incremental_mark_compactor_event_ = current_;
 }


 void GCTracer::Start(GarbageCollector collector, const char* gc_reason,
                      const char* collector_reason) {
   start_counter_++;
   if (start_counter_ != 1) return;

   previous_ = current_;
   double start_time = heap_->MonotonicallyIncreasingTimeInMs();
-  if (new_space_top_after_gc_ != 0) {
-    AddNewSpaceAllocationTime(
-        start_time - previous_.end_time,
-        reinterpret_cast<intptr_t>((heap_->new_space()->top()) -
-                                   new_space_top_after_gc_));
-  }
+  SampleNewSpaceAllocation(start_time, heap_->NewSpaceAllocationCounter());
   if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR)
     previous_incremental_mark_compactor_event_ = current_;

   if (collector == SCAVENGER) {
     current_ = Event(Event::SCAVENGER, gc_reason, collector_reason);
   } else if (collector == MARK_COMPACTOR) {
     if (heap_->incremental_marking()->WasActivated()) {
       current_ =
           Event(Event::INCREMENTAL_MARK_COMPACTOR, gc_reason, collector_reason);
     } else {
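
Note on the Start() change above: instead of diffing new_space()->top() against the top pointer captured at the end of the previous GC, the tracer now samples heap_->NewSpaceAllocationCounter() and lets SampleNewSpaceAllocation() compute the delta. The counter itself is not defined in this file; a minimal sketch of the kind of cumulative counter this presumably relies on (the class and method names below are illustrative, not taken from the patch):

// Illustrative sketch only: a cumulative new-space allocation counter.
// The heap would fold "bytes allocated since the last GC" into the total at
// well-defined points (e.g. when a GC starts), so successive reads grow
// monotonically, modulo unsigned wrap-around.
class NewSpaceAllocationCounterSketch {
 public:
  // Called at GC start with the bytes allocated in new space since the last GC.
  void AdvanceBy(size_t allocated_since_last_gc) {
    counter_ += allocated_since_last_gc;
  }

  // Read by the tracer; the delta between two reads is the allocation that
  // happened in between, independent of where the new-space top pointer
  // currently sits.
  size_t Value() const { return counter_; }

 private:
  size_t counter_ = 0;
};
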
(...skipping 44 matching lines...)
   DCHECK(start_counter_ >= 0);
   DCHECK((collector == SCAVENGER && current_.type == Event::SCAVENGER) ||
          (collector == MARK_COMPACTOR &&
           (current_.type == Event::MARK_COMPACTOR ||
            current_.type == Event::INCREMENTAL_MARK_COMPACTOR)));

   current_.end_time = heap_->MonotonicallyIncreasingTimeInMs();
   current_.end_object_size = heap_->SizeOfObjects();
   current_.end_memory_size = heap_->isolate()->memory_allocator()->Size();
   current_.end_holes_size = CountTotalHolesSize(heap_);
-  new_space_top_after_gc_ =
-      reinterpret_cast<intptr_t>(heap_->new_space()->top());

   int committed_memory = static_cast<int>(heap_->CommittedMemory() / KB);
   int used_memory = static_cast<int>(current_.end_object_size / KB);
   heap_->isolate()->counters()->aggregated_memory_heap_committed()->AddSample(
       current_.end_time, committed_memory);
   heap_->isolate()->counters()->aggregated_memory_heap_used()->AddSample(
       current_.end_time, used_memory);

   if (current_.type == Event::SCAVENGER) {
     current_.incremental_marking_steps =
(...skipping 53 matching lines...)
     if (FLAG_trace_gc_nvp)
       PrintNVP();
     else
       Print();

     heap_->PrintShortHeapStatistics();
   }
 }


-void GCTracer::AddNewSpaceAllocationTime(double duration,
-                                         intptr_t allocation_in_bytes) {
-  allocation_events_.push_front(AllocationEvent(duration, allocation_in_bytes));
+void GCTracer::SampleNewSpaceAllocation(double current_ms,
+                                        size_t counter_bytes) {
+  if (new_space_allocation_time_ms_ == 0) {
+    // It is the first sample.
+    new_space_allocation_time_ms_ = current_ms;
+    new_space_allocation_counter_bytes_ = counter_bytes;
+    return;
+  }
+  // This assumes that counters are unsigned integers so that the subtraction
+  // below works even if the new counter is less than the old counter.
+  size_t allocated_bytes = counter_bytes - new_space_allocation_counter_bytes_;
+  double duration = current_ms - new_space_allocation_time_ms_;
+  const double kMinDurationMs = 1;
+  if (duration < kMinDurationMs) {
+    // Do not sample small durations to avoid precision errors.
+    return;
+  }
+  new_space_allocation_time_ms_ = current_ms;
+  new_space_allocation_counter_bytes_ = counter_bytes;
+  allocation_events_.push_front(AllocationEvent(duration, allocated_bytes));
 }
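
The comment about unsigned counters is doing real work: size_t arithmetic is defined modulo 2^N, so counter_bytes - new_space_allocation_counter_bytes_ still yields the bytes allocated between the two samples even if the counter wrapped past zero in the meantime. A small standalone illustration (the 32-bit values below are made up for the example):

#include <cstdint>
#include <cstdio>

int main() {
  // Hypothetical 32-bit counter samples taken just before and just after the
  // counter wraps around its maximum value.
  uint32_t old_counter = 4294967290u;  // 2^32 - 6
  uint32_t new_counter = 100u;         // wrapped past zero
  // Unsigned subtraction is defined modulo 2^32, so the difference is the
  // 6 + 100 = 106 bytes allocated between the two samples.
  uint32_t allocated_bytes = new_counter - old_counter;
  std::printf("%u\n", allocated_bytes);  // prints 106
  return 0;
}
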


 void GCTracer::AddContextDisposalTime(double time) {
   context_disposal_events_.push_front(ContextDisposalEvent(time));
 }


 void GCTracer::AddSurvivalRatio(double promotion_ratio) {
   survival_events_.push_front(SurvivalEvent(promotion_ratio));
(...skipping 273 matching lines...)
     durations += iter->end_time - iter->start_time;
     ++iter;
   }

   if (durations == 0.0) return 0;

   return static_cast<intptr_t>(bytes / durations);
 }


-intptr_t GCTracer::NewSpaceAllocationThroughputInBytesPerMillisecond() const {
-  intptr_t bytes = 0;
+size_t GCTracer::NewSpaceAllocationThroughputInBytesPerMillisecond() const {
+  size_t bytes = 0;
   double durations = 0.0;
   AllocationEventBuffer::const_iterator iter = allocation_events_.begin();
-  while (iter != allocation_events_.end()) {
+  const size_t max_bytes = static_cast<size_t>(-1);
+  while (iter != allocation_events_.end() && bytes < max_bytes - bytes) {
     bytes += iter->allocation_in_bytes_;
     durations += iter->duration_;
     ++iter;
   }

   if (durations == 0.0) return 0;

-  return static_cast<intptr_t>(bytes / durations);
+  return static_cast<size_t>(bytes / durations + 0.5);
 }
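
Two details of the rewritten throughput function are easy to miss: the extra loop condition bytes < max_bytes - bytes stops summing once the running total reaches half of the size_t range, a cheap guard against the sum wrapping around, and the + 0.5 in the final cast rounds the rate to the nearest integer instead of truncating. The same two patterns in isolation (helper names are mine, not from the patch):

#include <cstddef>
#include <vector>

// Same guard as in the loop above: stop once the running total has reached
// half of the size_t range, leaving at least as much headroom as has already
// been summed.
size_t GuardedByteSum(const std::vector<size_t>& samples) {
  const size_t max_bytes = static_cast<size_t>(-1);
  size_t bytes = 0;
  for (size_t sample : samples) {
    if (bytes >= max_bytes - bytes) break;
    bytes += sample;
  }
  return bytes;
}

// Same rounding as the return statement above: adding 0.5 before the cast
// rounds to the nearest integer rather than truncating toward zero.
size_t RoundedRate(size_t bytes, double duration_ms) {
  if (duration_ms == 0.0) return 0;
  return static_cast<size_t>(bytes / duration_ms + 0.5);
}
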


 double GCTracer::ContextDisposalRateInMilliseconds() const {
   if (context_disposal_events_.size() < kRingBufferMaxSize) return 0.0;

   double begin = base::OS::TimeCurrentMillis();
   double end = 0.0;
   ContextDisposalEventBuffer::const_iterator iter =
       context_disposal_events_.begin();
(...skipping 21 matching lines...)


 bool GCTracer::SurvivalEventsRecorded() const {
   return survival_events_.size() > 0;
 }


 void GCTracer::ResetSurvivalEvents() { survival_events_.reset(); }
 }
 }  // namespace v8::internal
