OLD | NEW |
---|---|
1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #include "src/heap/gc-tracer.h" | 5 #include "src/heap/gc-tracer.h" |
6 | 6 |
7 #include "src/counters.h" | 7 #include "src/counters.h" |
8 #include "src/heap/heap-inl.h" | 8 #include "src/heap/heap-inl.h" |
9 #include "src/isolate.h" | 9 #include "src/isolate.h" |
10 | 10 |
11 namespace v8 { | 11 namespace v8 { |
12 namespace internal { | 12 namespace internal { |
13 | 13 |
14 static intptr_t CountTotalHolesSize(Heap* heap) { | 14 static size_t CountTotalHolesSize(Heap* heap) { |
15 intptr_t holes_size = 0; | 15 size_t holes_size = 0; |
16 OldSpaces spaces(heap); | 16 OldSpaces spaces(heap); |
17 for (OldSpace* space = spaces.next(); space != NULL; space = spaces.next()) { | 17 for (OldSpace* space = spaces.next(); space != NULL; space = spaces.next()) { |
18 holes_size += space->Waste() + space->Available(); | 18 DCHECK_GE(space->Waste(), 0); |
19 DCHECK_GE(space->Available(), 0); | |
Michael Lippautz
2016/10/11 14:54:38
nit:
DCHECK_GE(holes_size + space->Waste() + space->Available(), holes_size);
| |
20 holes_size += static_cast<size_t>(space->Waste() + space->Available()); | |
19 } | 21 } |
20 return holes_size; | 22 return holes_size; |
21 } | 23 } |
22 | 24 |
23 | 25 |
24 GCTracer::Scope::Scope(GCTracer* tracer, ScopeId scope) | 26 GCTracer::Scope::Scope(GCTracer* tracer, ScopeId scope) |
25 : tracer_(tracer), scope_(scope) { | 27 : tracer_(tracer), scope_(scope) { |
26 // All accesses to incremental_marking_scope assume that incremental marking | 28 // All accesses to incremental_marking_scope assume that incremental marking |
27 // scopes come first. | 29 // scopes come first. |
28 STATIC_ASSERT(FIRST_INCREMENTAL_SCOPE == 0); | 30 STATIC_ASSERT(FIRST_INCREMENTAL_SCOPE == 0); |
(...skipping 152 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
181 current_.new_space_object_size = | 183 current_.new_space_object_size = |
182 heap_->new_space()->top() - heap_->new_space()->bottom(); | 184 heap_->new_space()->top() - heap_->new_space()->bottom(); |
183 | 185 |
184 current_.incremental_marking_bytes = 0; | 186 current_.incremental_marking_bytes = 0; |
185 current_.incremental_marking_duration = 0; | 187 current_.incremental_marking_duration = 0; |
186 | 188 |
187 for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) { | 189 for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) { |
188 current_.scopes[i] = 0; | 190 current_.scopes[i] = 0; |
189 } | 191 } |
190 | 192 |
191 int committed_memory = static_cast<int>(heap_->CommittedMemory() / KB); | 193 size_t committed_memory = heap_->CommittedMemory() / KB; |
192 int used_memory = static_cast<int>(current_.start_object_size / KB); | 194 size_t used_memory = current_.start_object_size / KB; |
193 | 195 |
194 Counters* counters = heap_->isolate()->counters(); | 196 Counters* counters = heap_->isolate()->counters(); |
195 | 197 |
196 if (collector == SCAVENGER) { | 198 if (collector == SCAVENGER) { |
197 counters->scavenge_reason()->AddSample(static_cast<int>(gc_reason)); | 199 counters->scavenge_reason()->AddSample(static_cast<int>(gc_reason)); |
198 } else { | 200 } else { |
199 counters->mark_compact_reason()->AddSample(static_cast<int>(gc_reason)); | 201 counters->mark_compact_reason()->AddSample(static_cast<int>(gc_reason)); |
200 } | 202 } |
201 counters->aggregated_memory_heap_committed()->AddSample(start_time, | 203 counters->aggregated_memory_heap_committed()->AddSample(start_time, |
202 committed_memory); | 204 committed_memory); |
(...skipping 31 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
234 current_.type == Event::INCREMENTAL_MARK_COMPACTOR))); | 236 current_.type == Event::INCREMENTAL_MARK_COMPACTOR))); |
235 | 237 |
236 current_.end_time = heap_->MonotonicallyIncreasingTimeInMs(); | 238 current_.end_time = heap_->MonotonicallyIncreasingTimeInMs(); |
237 current_.end_object_size = heap_->SizeOfObjects(); | 239 current_.end_object_size = heap_->SizeOfObjects(); |
238 current_.end_memory_size = heap_->memory_allocator()->Size(); | 240 current_.end_memory_size = heap_->memory_allocator()->Size(); |
239 current_.end_holes_size = CountTotalHolesSize(heap_); | 241 current_.end_holes_size = CountTotalHolesSize(heap_); |
240 current_.survived_new_space_object_size = heap_->SurvivedNewSpaceObjectSize(); | 242 current_.survived_new_space_object_size = heap_->SurvivedNewSpaceObjectSize(); |
241 | 243 |
242 AddAllocation(current_.end_time); | 244 AddAllocation(current_.end_time); |
243 | 245 |
244 int committed_memory = static_cast<int>(heap_->CommittedMemory() / KB); | 246 size_t committed_memory = heap_->CommittedMemory() / KB; |
245 int used_memory = static_cast<int>(current_.end_object_size / KB); | 247 size_t used_memory = current_.end_object_size / KB; |
246 heap_->isolate()->counters()->aggregated_memory_heap_committed()->AddSample( | 248 heap_->isolate()->counters()->aggregated_memory_heap_committed()->AddSample( |
247 current_.end_time, committed_memory); | 249 current_.end_time, committed_memory); |
248 heap_->isolate()->counters()->aggregated_memory_heap_used()->AddSample( | 250 heap_->isolate()->counters()->aggregated_memory_heap_used()->AddSample( |
249 current_.end_time, used_memory); | 251 current_.end_time, used_memory); |
250 | 252 |
251 double duration = current_.end_time - current_.start_time; | 253 double duration = current_.end_time - current_.start_time; |
252 | 254 |
253 if (current_.type == Event::SCAVENGER) { | 255 if (current_.type == Event::SCAVENGER) { |
254 recorded_scavenges_total_.Push( | 256 recorded_scavenges_total_.Push( |
255 MakeBytesAndDuration(current_.new_space_object_size, duration)); | 257 MakeBytesAndDuration(current_.new_space_object_size, duration)); |
(...skipping 85 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
341 allocation_duration_since_gc_ = 0; | 343 allocation_duration_since_gc_ = 0; |
342 new_space_allocation_in_bytes_since_gc_ = 0; | 344 new_space_allocation_in_bytes_since_gc_ = 0; |
343 old_generation_allocation_in_bytes_since_gc_ = 0; | 345 old_generation_allocation_in_bytes_since_gc_ = 0; |
344 } | 346 } |
345 | 347 |
346 | 348 |
347 void GCTracer::AddContextDisposalTime(double time) { | 349 void GCTracer::AddContextDisposalTime(double time) { |
348 recorded_context_disposal_times_.Push(time); | 350 recorded_context_disposal_times_.Push(time); |
349 } | 351 } |
350 | 352 |
351 | |
352 void GCTracer::AddCompactionEvent(double duration, | 353 void GCTracer::AddCompactionEvent(double duration, |
353 intptr_t live_bytes_compacted) { | 354 size_t live_bytes_compacted) { |
354 recorded_compactions_.Push( | 355 recorded_compactions_.Push( |
355 MakeBytesAndDuration(live_bytes_compacted, duration)); | 356 MakeBytesAndDuration(live_bytes_compacted, duration)); |
356 } | 357 } |
357 | 358 |
358 | 359 |
359 void GCTracer::AddSurvivalRatio(double promotion_ratio) { | 360 void GCTracer::AddSurvivalRatio(double promotion_ratio) { |
360 recorded_survival_ratios_.Push(promotion_ratio); | 361 recorded_survival_ratios_.Push(promotion_ratio); |
361 } | 362 } |
362 | 363 |
363 | 364 void GCTracer::AddIncrementalMarkingStep(double duration, size_t bytes) { |
364 void GCTracer::AddIncrementalMarkingStep(double duration, intptr_t bytes) { | |
365 if (bytes > 0) { | 365 if (bytes > 0) { |
366 incremental_marking_bytes_ += bytes; | 366 incremental_marking_bytes_ += bytes; |
367 incremental_marking_duration_ += duration; | 367 incremental_marking_duration_ += duration; |
368 } | 368 } |
369 } | 369 } |
370 | 370 |
371 void GCTracer::Output(const char* format, ...) const { | 371 void GCTracer::Output(const char* format, ...) const { |
372 if (FLAG_trace_gc) { | 372 if (FLAG_trace_gc) { |
373 va_list arguments; | 373 va_list arguments; |
374 va_start(arguments, format); | 374 va_start(arguments, format); |
(...skipping 44 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
419 static_cast<double>(current_.end_memory_size) / MB, duration, | 419 static_cast<double>(current_.end_memory_size) / MB, duration, |
420 TotalExternalTime(), incremental_buffer, | 420 TotalExternalTime(), incremental_buffer, |
421 Heap::GarbageCollectionReasonToString(current_.gc_reason), | 421 Heap::GarbageCollectionReasonToString(current_.gc_reason), |
422 current_.collector_reason != nullptr ? current_.collector_reason : ""); | 422 current_.collector_reason != nullptr ? current_.collector_reason : ""); |
423 } | 423 } |
424 | 424 |
425 | 425 |
426 void GCTracer::PrintNVP() const { | 426 void GCTracer::PrintNVP() const { |
427 double duration = current_.end_time - current_.start_time; | 427 double duration = current_.end_time - current_.start_time; |
428 double spent_in_mutator = current_.start_time - previous_.end_time; | 428 double spent_in_mutator = current_.start_time - previous_.end_time; |
429 intptr_t allocated_since_last_gc = | 429 size_t allocated_since_last_gc = |
430 current_.start_object_size - previous_.end_object_size; | 430 current_.start_object_size - previous_.end_object_size; |
431 | 431 |
432 double incremental_walltime_duration = 0; | 432 double incremental_walltime_duration = 0; |
433 | 433 |
434 if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) { | 434 if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) { |
435 incremental_walltime_duration = | 435 incremental_walltime_duration = |
436 current_.end_time - incremental_marking_start_time_; | 436 current_.end_time - incremental_marking_start_time_; |
437 } | 437 } |
438 | 438 |
439 switch (current_.type) { | 439 switch (current_.type) { |
(...skipping 250 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
690 const int min_speed = 1; | 690 const int min_speed = 1; |
691 if (speed >= max_speed) return max_speed; | 691 if (speed >= max_speed) return max_speed; |
692 if (speed <= min_speed) return min_speed; | 692 if (speed <= min_speed) return min_speed; |
693 return speed; | 693 return speed; |
694 } | 694 } |
695 | 695 |
696 double GCTracer::AverageSpeed(const RingBuffer<BytesAndDuration>& buffer) { | 696 double GCTracer::AverageSpeed(const RingBuffer<BytesAndDuration>& buffer) { |
697 return AverageSpeed(buffer, MakeBytesAndDuration(0, 0), 0); | 697 return AverageSpeed(buffer, MakeBytesAndDuration(0, 0), 0); |
698 } | 698 } |
699 | 699 |
700 void GCTracer::RecordIncrementalMarkingSpeed(intptr_t bytes, double duration) { | 700 void GCTracer::RecordIncrementalMarkingSpeed(size_t bytes, double duration) { |
701 if (duration == 0 || bytes == 0) return; | 701 if (duration == 0 || bytes == 0) return; |
702 double current_speed = bytes / duration; | 702 double current_speed = bytes / duration; |
703 if (recorded_incremental_marking_speed_ == 0) { | 703 if (recorded_incremental_marking_speed_ == 0) { |
704 recorded_incremental_marking_speed_ = current_speed; | 704 recorded_incremental_marking_speed_ = current_speed; |
705 } else { | 705 } else { |
706 recorded_incremental_marking_speed_ = | 706 recorded_incremental_marking_speed_ = |
707 (recorded_incremental_marking_speed_ + current_speed) / 2; | 707 (recorded_incremental_marking_speed_ + current_speed) / 2; |
708 } | 708 } |
709 } | 709 } |
710 | 710 |
(...skipping 102 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
813 } | 813 } |
814 | 814 |
815 void GCTracer::ResetSurvivalEvents() { recorded_survival_ratios_.Reset(); } | 815 void GCTracer::ResetSurvivalEvents() { recorded_survival_ratios_.Reset(); } |
816 | 816 |
817 void GCTracer::NotifyIncrementalMarkingStart() { | 817 void GCTracer::NotifyIncrementalMarkingStart() { |
818 incremental_marking_start_time_ = heap_->MonotonicallyIncreasingTimeInMs(); | 818 incremental_marking_start_time_ = heap_->MonotonicallyIncreasingTimeInMs(); |
819 } | 819 } |
820 | 820 |
821 } // namespace internal | 821 } // namespace internal |
822 } // namespace v8 | 822 } // namespace v8 |
OLD | NEW |