| OLD | NEW |
| 1 // Copyright 2014 the V8 project authors. All rights reserved. | 1 // Copyright 2014 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/heap/gc-tracer.h" | 5 #include "src/heap/gc-tracer.h" |
| 6 | 6 |
| 7 #include "src/counters.h" | 7 #include "src/counters.h" |
| 8 #include "src/heap/heap-inl.h" | 8 #include "src/heap/heap-inl.h" |
| 9 #include "src/isolate.h" | 9 #include "src/isolate.h" |
| 10 | 10 |
| 11 namespace v8 { | 11 namespace v8 { |
| 12 namespace internal { | 12 namespace internal { |
| 13 | 13 |
| 14 static intptr_t CountTotalHolesSize(Heap* heap) { | 14 static intptr_t CountTotalHolesSize(Heap* heap) { |
| 15 intptr_t holes_size = 0; | 15 intptr_t holes_size = 0; |
| 16 OldSpaces spaces(heap); | 16 OldSpaces spaces(heap); |
| 17 for (OldSpace* space = spaces.next(); space != NULL; space = spaces.next()) { | 17 for (OldSpace* space = spaces.next(); space != NULL; space = spaces.next()) { |
| 18 holes_size += space->Waste() + space->Available(); | 18 holes_size += space->Waste() + space->Available(); |
| 19 } | 19 } |
| 20 return holes_size; | 20 return holes_size; |
| 21 } | 21 } |
| 22 | 22 |
| 23 | 23 |
| 24 GCTracer::Scope::Scope(GCTracer* tracer, ScopeId scope) | 24 GCTracer::Scope::Scope(GCTracer* tracer, ScopeId scope) |
| 25 : tracer_(tracer), scope_(scope) { | 25 : tracer_(tracer), scope_(scope) { |
| 26 // All accesses to incremental_marking_scope assume that incremental marking |
| 27 // scopes come first. |
| 28 STATIC_ASSERT(FIRST_INCREMENTAL_SCOPE == 0); |
| 26 start_time_ = tracer_->heap_->MonotonicallyIncreasingTimeInMs(); | 29 start_time_ = tracer_->heap_->MonotonicallyIncreasingTimeInMs(); |
| 27 // TODO(cbruni): remove once we fully moved to a trace-based system. | 30 // TODO(cbruni): remove once we fully moved to a trace-based system. |
| 28 if (TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED() || | 31 if (TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED() || |
| 29 FLAG_runtime_call_stats) { | 32 FLAG_runtime_call_stats) { |
| 30 RuntimeCallStats::Enter( | 33 RuntimeCallStats::Enter( |
| 31 tracer_->heap_->isolate()->counters()->runtime_call_stats(), &timer_, | 34 tracer_->heap_->isolate()->counters()->runtime_call_stats(), &timer_, |
| 32 &RuntimeCallStats::GC); | 35 &RuntimeCallStats::GC); |
| 33 } | 36 } |
| 34 } | 37 } |
| 35 | 38 |
| (...skipping 30 matching lines...) |
| 66 end_time(0.0), | 69 end_time(0.0), |
| 67 reduce_memory(false), | 70 reduce_memory(false), |
| 68 start_object_size(0), | 71 start_object_size(0), |
| 69 end_object_size(0), | 72 end_object_size(0), |
| 70 start_memory_size(0), | 73 start_memory_size(0), |
| 71 end_memory_size(0), | 74 end_memory_size(0), |
| 72 start_holes_size(0), | 75 start_holes_size(0), |
| 73 end_holes_size(0), | 76 end_holes_size(0), |
| 74 new_space_object_size(0), | 77 new_space_object_size(0), |
| 75 survived_new_space_object_size(0), | 78 survived_new_space_object_size(0), |
| 76 cumulative_incremental_marking_bytes(0), | |
| 77 incremental_marking_bytes(0), | 79 incremental_marking_bytes(0), |
| 78 cumulative_pure_incremental_marking_duration(0.0), | 80 incremental_marking_duration(0.0) { |
| 79 pure_incremental_marking_duration(0.0) { | |
| 80 for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) { | 81 for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) { |
| 81 scopes[i] = 0; | 82 scopes[i] = 0; |
| 82 } | 83 } |
| 83 } | 84 } |
| 84 | 85 |
| 85 | 86 |
| 86 const char* GCTracer::Event::TypeName(bool short_name) const { | 87 const char* GCTracer::Event::TypeName(bool short_name) const { |
| 87 switch (type) { | 88 switch (type) { |
| 88 case SCAVENGER: | 89 case SCAVENGER: |
| 89 if (short_name) { | 90 if (short_name) { |
| (...skipping 15 matching lines...) |
| 105 return "Start"; | 106 return "Start"; |
| 106 } | 107 } |
| 107 } | 108 } |
| 108 return "Unknown Event Type"; | 109 return "Unknown Event Type"; |
| 109 } | 110 } |
| 110 | 111 |
| 111 GCTracer::GCTracer(Heap* heap) | 112 GCTracer::GCTracer(Heap* heap) |
| 112 : heap_(heap), | 113 : heap_(heap), |
| 113 current_(Event::START, GarbageCollectionReason::kUnknown, nullptr), | 114 current_(Event::START, GarbageCollectionReason::kUnknown, nullptr), |
| 114 previous_(current_), | 115 previous_(current_), |
| 115 previous_incremental_mark_compactor_event_(current_), | 116 incremental_marking_bytes_(0), |
| 116 cumulative_incremental_marking_bytes_(0), | 117 incremental_marking_duration_(0.0), |
| 117 cumulative_incremental_marking_duration_(0.0), | 118 recorded_incremental_marking_speed_(0.0), |
| 118 cumulative_pure_incremental_marking_duration_(0.0), | |
| 119 allocation_time_ms_(0.0), | 119 allocation_time_ms_(0.0), |
| 120 new_space_allocation_counter_bytes_(0), | 120 new_space_allocation_counter_bytes_(0), |
| 121 old_generation_allocation_counter_bytes_(0), | 121 old_generation_allocation_counter_bytes_(0), |
| 122 allocation_duration_since_gc_(0.0), | 122 allocation_duration_since_gc_(0.0), |
| 123 new_space_allocation_in_bytes_since_gc_(0), | 123 new_space_allocation_in_bytes_since_gc_(0), |
| 124 old_generation_allocation_in_bytes_since_gc_(0), | 124 old_generation_allocation_in_bytes_since_gc_(0), |
| 125 combined_mark_compact_speed_cache_(0.0), | 125 combined_mark_compact_speed_cache_(0.0), |
| 126 start_counter_(0) { | 126 start_counter_(0) { |
| 127 current_.end_time = heap_->MonotonicallyIncreasingTimeInMs(); | 127 current_.end_time = heap_->MonotonicallyIncreasingTimeInMs(); |
| 128 } | 128 } |
| 129 | 129 |
| 130 void GCTracer::ResetForTesting() { | 130 void GCTracer::ResetForTesting() { |
| 131 current_ = Event(Event::START, GarbageCollectionReason::kTesting, nullptr); | 131 current_ = Event(Event::START, GarbageCollectionReason::kTesting, nullptr); |
| 132 current_.end_time = heap_->MonotonicallyIncreasingTimeInMs(); | 132 current_.end_time = heap_->MonotonicallyIncreasingTimeInMs(); |
| 133 previous_ = previous_incremental_mark_compactor_event_ = current_; | 133 previous_ = current_; |
| 134 cumulative_incremental_marking_bytes_ = 0.0; | 134 ResetIncrementalMarkingCounters(); |
| 135 cumulative_incremental_marking_duration_ = 0.0; | |
| 136 cumulative_pure_incremental_marking_duration_ = 0.0; | |
| 137 for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) { | |
| 138 incremental_marking_scopes_[i].cumulative_duration = 0.0; | |
| 139 incremental_marking_scopes_[i].steps = 0; | |
| 140 incremental_marking_scopes_[i].longest_step = 0.0; | |
| 141 } | |
| 142 allocation_time_ms_ = 0.0; | 135 allocation_time_ms_ = 0.0; |
| 143 new_space_allocation_counter_bytes_ = 0.0; | 136 new_space_allocation_counter_bytes_ = 0.0; |
| 144 old_generation_allocation_counter_bytes_ = 0.0; | 137 old_generation_allocation_counter_bytes_ = 0.0; |
| 145 allocation_duration_since_gc_ = 0.0; | 138 allocation_duration_since_gc_ = 0.0; |
| 146 new_space_allocation_in_bytes_since_gc_ = 0.0; | 139 new_space_allocation_in_bytes_since_gc_ = 0.0; |
| 147 old_generation_allocation_in_bytes_since_gc_ = 0.0; | 140 old_generation_allocation_in_bytes_since_gc_ = 0.0; |
| 148 combined_mark_compact_speed_cache_ = 0.0; | 141 combined_mark_compact_speed_cache_ = 0.0; |
| 149 recorded_scavenges_total_.Reset(); | 142 recorded_scavenges_total_.Reset(); |
| 150 recorded_scavenges_survived_.Reset(); | 143 recorded_scavenges_survived_.Reset(); |
| 151 recorded_compactions_.Reset(); | 144 recorded_compactions_.Reset(); |
| 152 recorded_mark_compacts_.Reset(); | 145 recorded_mark_compacts_.Reset(); |
| 153 recorded_incremental_mark_compacts_.Reset(); | 146 recorded_incremental_mark_compacts_.Reset(); |
| 154 recorded_new_generation_allocations_.Reset(); | 147 recorded_new_generation_allocations_.Reset(); |
| 155 recorded_old_generation_allocations_.Reset(); | 148 recorded_old_generation_allocations_.Reset(); |
| 156 recorded_context_disposal_times_.Reset(); | 149 recorded_context_disposal_times_.Reset(); |
| 157 recorded_survival_ratios_.Reset(); | 150 recorded_survival_ratios_.Reset(); |
| 158 start_counter_ = 0; | 151 start_counter_ = 0; |
| 159 } | 152 } |
| 160 | 153 |
| 161 void GCTracer::Start(GarbageCollector collector, | 154 void GCTracer::Start(GarbageCollector collector, |
| 162 GarbageCollectionReason gc_reason, | 155 GarbageCollectionReason gc_reason, |
| 163 const char* collector_reason) { | 156 const char* collector_reason) { |
| 164 start_counter_++; | 157 start_counter_++; |
| 165 if (start_counter_ != 1) return; | 158 if (start_counter_ != 1) return; |
| 166 | 159 |
| 167 previous_ = current_; | 160 previous_ = current_; |
| 168 double start_time = heap_->MonotonicallyIncreasingTimeInMs(); | 161 double start_time = heap_->MonotonicallyIncreasingTimeInMs(); |
| 169 SampleAllocation(start_time, heap_->NewSpaceAllocationCounter(), | 162 SampleAllocation(start_time, heap_->NewSpaceAllocationCounter(), |
| 170 heap_->OldGenerationAllocationCounter()); | 163 heap_->OldGenerationAllocationCounter()); |
| 171 if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) | |
| 172 previous_incremental_mark_compactor_event_ = current_; | |
| 173 | 164 |
| 174 if (collector == SCAVENGER) { | 165 if (collector == SCAVENGER) { |
| 175 current_ = Event(Event::SCAVENGER, gc_reason, collector_reason); | 166 current_ = Event(Event::SCAVENGER, gc_reason, collector_reason); |
| 176 } else if (collector == MARK_COMPACTOR) { | 167 } else if (collector == MARK_COMPACTOR) { |
| 177 if (heap_->incremental_marking()->WasActivated()) { | 168 if (heap_->incremental_marking()->WasActivated()) { |
| 178 current_ = | 169 current_ = |
| 179 Event(Event::INCREMENTAL_MARK_COMPACTOR, gc_reason, collector_reason); | 170 Event(Event::INCREMENTAL_MARK_COMPACTOR, gc_reason, collector_reason); |
| 180 } else { | 171 } else { |
| 181 current_ = Event(Event::MARK_COMPACTOR, gc_reason, collector_reason); | 172 current_ = Event(Event::MARK_COMPACTOR, gc_reason, collector_reason); |
| 182 } | 173 } |
| 183 } | 174 } |
| 184 | 175 |
| 185 current_.reduce_memory = heap_->ShouldReduceMemory(); | 176 current_.reduce_memory = heap_->ShouldReduceMemory(); |
| 186 current_.start_time = start_time; | 177 current_.start_time = start_time; |
| 187 current_.start_object_size = heap_->SizeOfObjects(); | 178 current_.start_object_size = heap_->SizeOfObjects(); |
| 188 current_.start_memory_size = heap_->memory_allocator()->Size(); | 179 current_.start_memory_size = heap_->memory_allocator()->Size(); |
| 189 current_.start_holes_size = CountTotalHolesSize(heap_); | 180 current_.start_holes_size = CountTotalHolesSize(heap_); |
| 190 current_.new_space_object_size = | 181 current_.new_space_object_size = |
| 191 heap_->new_space()->top() - heap_->new_space()->bottom(); | 182 heap_->new_space()->top() - heap_->new_space()->bottom(); |
| 192 | 183 |
| 193 current_.cumulative_incremental_marking_bytes = | 184 current_.incremental_marking_bytes = 0; |
| 194 cumulative_incremental_marking_bytes_; | 185 current_.incremental_marking_duration = 0; |
| 195 current_.cumulative_pure_incremental_marking_duration = | |
| 196 cumulative_pure_incremental_marking_duration_; | |
| 197 | 186 |
| 198 for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) { | 187 for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) { |
| 199 current_.scopes[i] = 0; | 188 current_.scopes[i] = 0; |
| 200 } | 189 } |
| 201 | 190 |
| 202 int committed_memory = static_cast<int>(heap_->CommittedMemory() / KB); | 191 int committed_memory = static_cast<int>(heap_->CommittedMemory() / KB); |
| 203 int used_memory = static_cast<int>(current_.start_object_size / KB); | 192 int used_memory = static_cast<int>(current_.start_object_size / KB); |
| 204 | 193 |
| 205 Counters* counters = heap_->isolate()->counters(); | 194 Counters* counters = heap_->isolate()->counters(); |
| 206 | 195 |
| 207 if (collector == SCAVENGER) { | 196 if (collector == SCAVENGER) { |
| 208 counters->scavenge_reason()->AddSample(static_cast<int>(gc_reason)); | 197 counters->scavenge_reason()->AddSample(static_cast<int>(gc_reason)); |
| 209 } else { | 198 } else { |
| 210 counters->mark_compact_reason()->AddSample(static_cast<int>(gc_reason)); | 199 counters->mark_compact_reason()->AddSample(static_cast<int>(gc_reason)); |
| 211 } | 200 } |
| 212 counters->aggregated_memory_heap_committed()->AddSample(start_time, | 201 counters->aggregated_memory_heap_committed()->AddSample(start_time, |
| 213 committed_memory); | 202 committed_memory); |
| 214 counters->aggregated_memory_heap_used()->AddSample(start_time, used_memory); | 203 counters->aggregated_memory_heap_used()->AddSample(start_time, used_memory); |
| 215 // TODO(cbruni): remove once we fully moved to a trace-based system. | 204 // TODO(cbruni): remove once we fully moved to a trace-based system. |
| 216 if (TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED() || | 205 if (TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_ENABLED() || |
| 217 FLAG_runtime_call_stats) { | 206 FLAG_runtime_call_stats) { |
| 218 RuntimeCallStats::Enter(heap_->isolate()->counters()->runtime_call_stats(), | 207 RuntimeCallStats::Enter(heap_->isolate()->counters()->runtime_call_stats(), |
| 219 &timer_, &RuntimeCallStats::GC); | 208 &timer_, &RuntimeCallStats::GC); |
| 220 } | 209 } |
| 221 } | 210 } |
| 222 | 211 |
| 223 void GCTracer::MergeBaseline(const Event& baseline) { | 212 void GCTracer::ResetIncrementalMarkingCounters() { |
| 224 current_.incremental_marking_bytes = | 213 incremental_marking_bytes_ = 0; |
| 225 current_.cumulative_incremental_marking_bytes - | 214 incremental_marking_duration_ = 0; |
| 226 baseline.cumulative_incremental_marking_bytes; | 215 for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) { |
| 227 current_.pure_incremental_marking_duration = | 216 incremental_marking_scopes_[i].ResetCurrentCycle(); |
| 228 current_.cumulative_pure_incremental_marking_duration - | |
| 229 baseline.cumulative_pure_incremental_marking_duration; | |
| 230 for (int i = Scope::FIRST_INCREMENTAL_SCOPE; | |
| 231 i <= Scope::LAST_INCREMENTAL_SCOPE; i++) { | |
| 232 current_.scopes[i] = | |
| 233 current_.incremental_marking_scopes[i].cumulative_duration - | |
| 234 baseline.incremental_marking_scopes[i].cumulative_duration; | |
| 235 } | 217 } |
| 236 } | 218 } |
| 237 | 219 |
| 238 void GCTracer::Stop(GarbageCollector collector) { | 220 void GCTracer::Stop(GarbageCollector collector) { |
| 239 start_counter_--; | 221 start_counter_--; |
| 240 if (start_counter_ != 0) { | 222 if (start_counter_ != 0) { |
| 241 heap_->isolate()->PrintWithTimestamp( | 223 heap_->isolate()->PrintWithTimestamp( |
| 242 "[Finished reentrant %s during %s.]\n", | 224 "[Finished reentrant %s during %s.]\n", |
| 243 collector == SCAVENGER ? "Scavenge" : "Mark-sweep", | 225 collector == SCAVENGER ? "Scavenge" : "Mark-sweep", |
| 244 current_.TypeName(false)); | 226 current_.TypeName(false)); |
| 245 return; | 227 return; |
| 246 } | 228 } |
| 247 | 229 |
| 248 DCHECK(start_counter_ >= 0); | 230 DCHECK(start_counter_ >= 0); |
| 249 DCHECK((collector == SCAVENGER && current_.type == Event::SCAVENGER) || | 231 DCHECK((collector == SCAVENGER && current_.type == Event::SCAVENGER) || |
| 250 (collector == MARK_COMPACTOR && | 232 (collector == MARK_COMPACTOR && |
| 251 (current_.type == Event::MARK_COMPACTOR || | 233 (current_.type == Event::MARK_COMPACTOR || |
| 252 current_.type == Event::INCREMENTAL_MARK_COMPACTOR))); | 234 current_.type == Event::INCREMENTAL_MARK_COMPACTOR))); |
| 253 | 235 |
| 254 for (int i = Scope::FIRST_INCREMENTAL_SCOPE; | |
| 255 i <= Scope::LAST_INCREMENTAL_SCOPE; i++) { | |
| 256 current_.incremental_marking_scopes[i] = incremental_marking_scopes_[i]; | |
| 257 } | |
| 258 | |
| 259 current_.end_time = heap_->MonotonicallyIncreasingTimeInMs(); | 236 current_.end_time = heap_->MonotonicallyIncreasingTimeInMs(); |
| 260 current_.end_object_size = heap_->SizeOfObjects(); | 237 current_.end_object_size = heap_->SizeOfObjects(); |
| 261 current_.end_memory_size = heap_->memory_allocator()->Size(); | 238 current_.end_memory_size = heap_->memory_allocator()->Size(); |
| 262 current_.end_holes_size = CountTotalHolesSize(heap_); | 239 current_.end_holes_size = CountTotalHolesSize(heap_); |
| 263 current_.survived_new_space_object_size = heap_->SurvivedNewSpaceObjectSize(); | 240 current_.survived_new_space_object_size = heap_->SurvivedNewSpaceObjectSize(); |
| 264 | 241 |
| 265 AddAllocation(current_.end_time); | 242 AddAllocation(current_.end_time); |
| 266 | 243 |
| 267 int committed_memory = static_cast<int>(heap_->CommittedMemory() / KB); | 244 int committed_memory = static_cast<int>(heap_->CommittedMemory() / KB); |
| 268 int used_memory = static_cast<int>(current_.end_object_size / KB); | 245 int used_memory = static_cast<int>(current_.end_object_size / KB); |
| 269 heap_->isolate()->counters()->aggregated_memory_heap_committed()->AddSample( | 246 heap_->isolate()->counters()->aggregated_memory_heap_committed()->AddSample( |
| 270 current_.end_time, committed_memory); | 247 current_.end_time, committed_memory); |
| 271 heap_->isolate()->counters()->aggregated_memory_heap_used()->AddSample( | 248 heap_->isolate()->counters()->aggregated_memory_heap_used()->AddSample( |
| 272 current_.end_time, used_memory); | 249 current_.end_time, used_memory); |
| 273 | 250 |
| 274 double duration = current_.end_time - current_.start_time; | 251 double duration = current_.end_time - current_.start_time; |
| 275 | 252 |
| 276 if (current_.type == Event::SCAVENGER) { | 253 if (current_.type == Event::SCAVENGER) { |
| 277 MergeBaseline(previous_); | |
| 278 recorded_scavenges_total_.Push( | 254 recorded_scavenges_total_.Push( |
| 279 MakeBytesAndDuration(current_.new_space_object_size, duration)); | 255 MakeBytesAndDuration(current_.new_space_object_size, duration)); |
| 280 recorded_scavenges_survived_.Push(MakeBytesAndDuration( | 256 recorded_scavenges_survived_.Push(MakeBytesAndDuration( |
| 281 current_.survived_new_space_object_size, duration)); | 257 current_.survived_new_space_object_size, duration)); |
| 282 } else if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) { | 258 } else if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) { |
| 283 MergeBaseline(previous_incremental_mark_compactor_event_); | 259 current_.incremental_marking_bytes = incremental_marking_bytes_; |
| 284 recorded_incremental_marking_steps_.Push( | 260 current_.incremental_marking_duration = incremental_marking_duration_; |
| 285 MakeBytesAndDuration(current_.incremental_marking_bytes, | 261 for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) { |
| 286 current_.pure_incremental_marking_duration)); | 262 current_.incremental_marking_scopes[i] = incremental_marking_scopes_[i]; |
| 263 current_.scopes[i] = incremental_marking_scopes_[i].duration; |
| 264 } |
| 265 RecordIncrementalMarkingSpeed(current_.incremental_marking_bytes, |
| 266 current_.incremental_marking_duration); |
| 287 recorded_incremental_mark_compacts_.Push( | 267 recorded_incremental_mark_compacts_.Push( |
| 288 MakeBytesAndDuration(current_.start_object_size, duration)); | 268 MakeBytesAndDuration(current_.start_object_size, duration)); |
| 269 ResetIncrementalMarkingCounters(); |
| 289 combined_mark_compact_speed_cache_ = 0.0; | 270 combined_mark_compact_speed_cache_ = 0.0; |
| 290 for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) { | |
| 291 incremental_marking_scopes_[i].ResetCurrentCycle(); | |
| 292 } | |
| 293 } else { | 271 } else { |
| 294 DCHECK(current_.incremental_marking_bytes == 0); | 272 DCHECK_EQ(0, current_.incremental_marking_bytes); |
| 295 DCHECK(current_.pure_incremental_marking_duration == 0); | 273 DCHECK_EQ(0, current_.incremental_marking_duration); |
| 296 recorded_mark_compacts_.Push( | 274 recorded_mark_compacts_.Push( |
| 297 MakeBytesAndDuration(current_.start_object_size, duration)); | 275 MakeBytesAndDuration(current_.start_object_size, duration)); |
| 276 ResetIncrementalMarkingCounters(); |
| 298 combined_mark_compact_speed_cache_ = 0.0; | 277 combined_mark_compact_speed_cache_ = 0.0; |
| 299 for (int i = 0; i < Scope::NUMBER_OF_INCREMENTAL_SCOPES; i++) { | |
| 300 incremental_marking_scopes_[i].ResetCurrentCycle(); | |
| 301 } | |
| 302 } | 278 } |
| 303 | 279 |
| 304 heap_->UpdateTotalGCTime(duration); | 280 heap_->UpdateTotalGCTime(duration); |
| 305 | 281 |
| 306 if (current_.type == Event::SCAVENGER && FLAG_trace_gc_ignore_scavenger) | 282 if (current_.type == Event::SCAVENGER && FLAG_trace_gc_ignore_scavenger) |
| 307 return; | 283 return; |
| 308 | 284 |
| 309 if (FLAG_trace_gc_nvp) { | 285 if (FLAG_trace_gc_nvp) { |
| 310 PrintNVP(); | 286 PrintNVP(); |
| 311 } else { | 287 } else { |
| (...skipping 67 matching lines...) |
| 379 MakeBytesAndDuration(live_bytes_compacted, duration)); | 355 MakeBytesAndDuration(live_bytes_compacted, duration)); |
| 380 } | 356 } |
| 381 | 357 |
| 382 | 358 |
| 383 void GCTracer::AddSurvivalRatio(double promotion_ratio) { | 359 void GCTracer::AddSurvivalRatio(double promotion_ratio) { |
| 384 recorded_survival_ratios_.Push(promotion_ratio); | 360 recorded_survival_ratios_.Push(promotion_ratio); |
| 385 } | 361 } |
| 386 | 362 |
| 387 | 363 |
| 388 void GCTracer::AddIncrementalMarkingStep(double duration, intptr_t bytes) { | 364 void GCTracer::AddIncrementalMarkingStep(double duration, intptr_t bytes) { |
| 389 cumulative_incremental_marking_bytes_ += bytes; | |
| 390 cumulative_incremental_marking_duration_ += duration; | |
| 391 if (bytes > 0) { | 365 if (bytes > 0) { |
| 392 cumulative_pure_incremental_marking_duration_ += duration; | 366 incremental_marking_bytes_ += bytes; |
| 367 incremental_marking_duration_ += duration; |
| 393 } | 368 } |
| 394 } | 369 } |
| 395 | 370 |
| 396 void GCTracer::Output(const char* format, ...) const { | 371 void GCTracer::Output(const char* format, ...) const { |
| 397 if (FLAG_trace_gc) { | 372 if (FLAG_trace_gc) { |
| 398 va_list arguments; | 373 va_list arguments; |
| 399 va_start(arguments, format); | 374 va_start(arguments, format); |
| 400 base::OS::VPrint(format, arguments); | 375 base::OS::VPrint(format, arguments); |
| 401 va_end(arguments); | 376 va_end(arguments); |
| 402 } | 377 } |
| (...skipping 312 matching lines...) |
| 715 const int min_speed = 1; | 690 const int min_speed = 1; |
| 716 if (speed >= max_speed) return max_speed; | 691 if (speed >= max_speed) return max_speed; |
| 717 if (speed <= min_speed) return min_speed; | 692 if (speed <= min_speed) return min_speed; |
| 718 return speed; | 693 return speed; |
| 719 } | 694 } |
| 720 | 695 |
| 721 double GCTracer::AverageSpeed(const RingBuffer<BytesAndDuration>& buffer) { | 696 double GCTracer::AverageSpeed(const RingBuffer<BytesAndDuration>& buffer) { |
| 722 return AverageSpeed(buffer, MakeBytesAndDuration(0, 0), 0); | 697 return AverageSpeed(buffer, MakeBytesAndDuration(0, 0), 0); |
| 723 } | 698 } |
| 724 | 699 |
| 700 void GCTracer::RecordIncrementalMarkingSpeed(intptr_t bytes, double duration) { |
| 701 if (duration == 0 || bytes == 0) return; |
| 702 double current_speed = bytes / duration; |
| 703 if (recorded_incremental_marking_speed_ == 0) { |
| 704 recorded_incremental_marking_speed_ = current_speed; |
| 705 } else { |
| 706 recorded_incremental_marking_speed_ = |
| 707 (recorded_incremental_marking_speed_ + current_speed) / 2; |
| 708 } |
| 709 } |
| 710 |
| 725 double GCTracer::IncrementalMarkingSpeedInBytesPerMillisecond() const { | 711 double GCTracer::IncrementalMarkingSpeedInBytesPerMillisecond() const { |
| 726 if (cumulative_incremental_marking_duration_ == 0.0) return 0; | 712 const int kConservativeSpeedInBytesPerMillisecond = 128 * KB; |
| 727 // We haven't completed an entire round of incremental marking, yet. | 713 if (recorded_incremental_marking_speed_ != 0) { |
| 728 // Use data from GCTracer instead of data from event buffers. | 714 return recorded_incremental_marking_speed_; |
| 729 if (recorded_incremental_marking_steps_.Count() == 0) { | |
| 730 return cumulative_incremental_marking_bytes_ / | |
| 731 cumulative_pure_incremental_marking_duration_; | |
| 732 } | 715 } |
| 733 return AverageSpeed(recorded_incremental_marking_steps_); | 716 if (incremental_marking_duration_ != 0.0) { |
| 717 return incremental_marking_bytes_ / incremental_marking_duration_; |
| 718 } |
| 719 return kConservativeSpeedInBytesPerMillisecond; |
| 734 } | 720 } |
| 735 | 721 |
| 736 double GCTracer::ScavengeSpeedInBytesPerMillisecond( | 722 double GCTracer::ScavengeSpeedInBytesPerMillisecond( |
| 737 ScavengeSpeedMode mode) const { | 723 ScavengeSpeedMode mode) const { |
| 738 if (mode == kForAllObjects) { | 724 if (mode == kForAllObjects) { |
| 739 return AverageSpeed(recorded_scavenges_total_); | 725 return AverageSpeed(recorded_scavenges_total_); |
| 740 } else { | 726 } else { |
| 741 return AverageSpeed(recorded_scavenges_survived_); | 727 return AverageSpeed(recorded_scavenges_survived_); |
| 742 } | 728 } |
| 743 } | 729 } |
| (...skipping 83 matching lines...) |
| 827 } | 813 } |
| 828 | 814 |
| 829 void GCTracer::ResetSurvivalEvents() { recorded_survival_ratios_.Reset(); } | 815 void GCTracer::ResetSurvivalEvents() { recorded_survival_ratios_.Reset(); } |
| 830 | 816 |
| 831 void GCTracer::NotifyIncrementalMarkingStart() { | 817 void GCTracer::NotifyIncrementalMarkingStart() { |
| 832 incremental_marking_start_time_ = heap_->MonotonicallyIncreasingTimeInMs(); | 818 incremental_marking_start_time_ = heap_->MonotonicallyIncreasingTimeInMs(); |
| 833 } | 819 } |
| 834 | 820 |
| 835 } // namespace internal | 821 } // namespace internal |
| 836 } // namespace v8 | 822 } // namespace v8 |
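The substance of this change: the tracer drops the cumulative incremental-marking counters (and the MergeBaseline subtraction against a saved baseline event) in favor of per-cycle counters that are folded into a running speed estimate when a mark-compact cycle completes. A minimal standalone sketch of that bookkeeping, using simplified, hypothetical names rather than the actual GCTracer interface:

    // Sketch of the per-cycle speed tracking this patch introduces
    // (hypothetical names; the real logic lives in v8::internal::GCTracer).
    #include <cstdint>
    #include <cstdio>

    class MarkingSpeedTracker {
     public:
      // Mirrors AddIncrementalMarkingStep: only steps that actually marked
      // bytes count toward the per-cycle totals.
      void AddStep(double duration_ms, intptr_t bytes) {
        if (bytes > 0) {
          cycle_bytes_ += bytes;
          cycle_duration_ms_ += duration_ms;
        }
      }

      // Mirrors the Stop path: fold the finished cycle's average speed into
      // a running estimate, then reset the per-cycle counters (as
      // ResetIncrementalMarkingCounters does).
      void FinishCycle() {
        if (cycle_duration_ms_ != 0 && cycle_bytes_ != 0) {
          double current = cycle_bytes_ / cycle_duration_ms_;
          recorded_speed_ =
              (recorded_speed_ == 0) ? current : (recorded_speed_ + current) / 2;
        }
        cycle_bytes_ = 0;
        cycle_duration_ms_ = 0.0;
      }

      // Mirrors IncrementalMarkingSpeedInBytesPerMillisecond's fallbacks:
      // recorded speed, else the unfinished cycle's average, else a
      // conservative constant.
      double SpeedInBytesPerMs() const {
        const double kConservative = 128 * 1024;  // 128 KB/ms
        if (recorded_speed_ != 0) return recorded_speed_;
        if (cycle_duration_ms_ != 0) return cycle_bytes_ / cycle_duration_ms_;
        return kConservative;
      }

     private:
      intptr_t cycle_bytes_ = 0;
      double cycle_duration_ms_ = 0.0;
      double recorded_speed_ = 0.0;
    };

    int main() {
      MarkingSpeedTracker tracker;
      tracker.AddStep(2.0, 4096);  // 2 ms step that marked 4096 bytes
      tracker.AddStep(1.0, 0);     // zero-byte step: ignored
      tracker.FinishCycle();
      std::printf("%.1f bytes/ms\n", tracker.SpeedInBytesPerMs());  // 2048.0
      return 0;
    }

Averaging the old and new values with equal weight, (old + current) / 2, is a cheap exponential moving average: each finished cycle halves the weight of all earlier history, so the estimate adapts quickly after a workload shift while still smoothing single-cycle noise.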