| OLD | NEW |
| 1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
| 2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
| 3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
| 4 | 4 |
| 5 #include "src/runtime-profiler.h" | 5 #include "src/runtime-profiler.h" |
| 6 | 6 |
| 7 #include "src/assembler.h" | 7 #include "src/assembler.h" |
| 8 #include "src/ast/scopeinfo.h" | 8 #include "src/ast/scopeinfo.h" |
| 9 #include "src/base/platform/platform.h" | 9 #include "src/base/platform/platform.h" |
| 10 #include "src/bootstrapper.h" | 10 #include "src/bootstrapper.h" |
| 11 #include "src/code-stubs.h" | 11 #include "src/code-stubs.h" |
| 12 #include "src/compilation-cache.h" | 12 #include "src/compilation-cache.h" |
| 13 #include "src/execution.h" | 13 #include "src/execution.h" |
| 14 #include "src/frames-inl.h" | 14 #include "src/frames-inl.h" |
| 15 #include "src/full-codegen/full-codegen.h" | 15 #include "src/full-codegen/full-codegen.h" |
| 16 #include "src/global-handles.h" | 16 #include "src/global-handles.h" |
| | 17 #include "src/interpreter/interpreter.h" |
| 17 | 18 |
| 18 namespace v8 { | 19 namespace v8 { |
| 19 namespace internal { | 20 namespace internal { |
| 20 | 21 |
| 21 | 22 |
| 22 // Number of times a function has to be seen on the stack before it is | 23 // Number of times a function has to be seen on the stack before it is |
| 23 // compiled for baseline. | 24 // compiled for baseline. |
| 24 static const int kProfilerTicksBeforeBaseline = 1; | 25 static const int kProfilerTicksBeforeBaseline = 1; |
| 25 // Number of times a function has to be seen on the stack before it is | 26 // Number of times a function has to be seen on the stack before it is |
| 26 // optimized. | 27 // optimized. |
| 27 static const int kProfilerTicksBeforeOptimization = 2; | 28 static const int kProfilerTicksBeforeOptimization = 2; |
| 28 // If the function optimization was disabled due to high deoptimization count, | 29 // If the function optimization was disabled due to high deoptimization count, |
| 29 // but the function is hot and has been seen on the stack this number of times, | 30 // but the function is hot and has been seen on the stack this number of times, |
| 30 // then we try to reenable optimization for this function. | 31 // then we try to reenable optimization for this function. |
| 31 static const int kProfilerTicksBeforeReenablingOptimization = 250; | 32 static const int kProfilerTicksBeforeReenablingOptimization = 250; |
| 32 // If a function does not have enough type info (according to | 33 // If a function does not have enough type info (according to |
| 33 // FLAG_type_info_threshold), but has seen a huge number of ticks, | 34 // FLAG_type_info_threshold), but has seen a huge number of ticks, |
| 34 // optimize it as it is. | 35 // optimize it as it is. |
| 35 static const int kTicksWhenNotEnoughTypeInfo = 100; | 36 static const int kTicksWhenNotEnoughTypeInfo = 100; |
| 36 // We only have one byte to store the number of ticks. | 37 // We only have one byte to store the number of ticks. |
| 37 STATIC_ASSERT(kProfilerTicksBeforeOptimization < 256); | 38 STATIC_ASSERT(kProfilerTicksBeforeOptimization < 256); |
| 38 STATIC_ASSERT(kProfilerTicksBeforeReenablingOptimization < 256); | 39 STATIC_ASSERT(kProfilerTicksBeforeReenablingOptimization < 256); |
| 39 STATIC_ASSERT(kTicksWhenNotEnoughTypeInfo < 256); | 40 STATIC_ASSERT(kTicksWhenNotEnoughTypeInfo < 256); |
| 40 | 41 |
| 41 // Maximum size in bytes of generated code for a function to allow OSR. | 42 // Maximum size in bytes of generated code for a function to allow OSR. |
| 42 static const int kOSRCodeSizeAllowanceBase = | 43 static const int kOSRCodeSizeAllowanceBase = |
| 43 100 * FullCodeGenerator::kCodeSizeMultiplier; | 44 100 * FullCodeGenerator::kCodeSizeMultiplier; |
| | 45 static const int kOSRCodeSizeAllowanceBaseIgnition = |
| | 46 100 * interpreter::Interpreter::kCodeSizeMultiplier; |
| 44 | 47 |
| 45 static const int kOSRCodeSizeAllowancePerTick = | 48 static const int kOSRCodeSizeAllowancePerTick = |
| 46 4 * FullCodeGenerator::kCodeSizeMultiplier; | 49 4 * FullCodeGenerator::kCodeSizeMultiplier; |
| | 50 static const int kOSRCodeSizeAllowancePerTickIgnition = |
| | 51 4 * interpreter::Interpreter::kCodeSizeMultiplier; |
| 47 | 52 |
| 48 // Maximum size in bytes of generated code for a function to be optimized | 53 // Maximum size in bytes of generated code for a function to be optimized |
| 49 // the very first time it is seen on the stack. | 54 // the very first time it is seen on the stack. |
| 50 static const int kMaxSizeEarlyOpt = | 55 static const int kMaxSizeEarlyOpt = |
| 51 5 * FullCodeGenerator::kCodeSizeMultiplier; | 56 5 * FullCodeGenerator::kCodeSizeMultiplier; |
| 52 | 57 |
| 53 | 58 |
| 54 RuntimeProfiler::RuntimeProfiler(Isolate* isolate) | 59 RuntimeProfiler::RuntimeProfiler(Isolate* isolate) |
| 55 : isolate_(isolate), | 60 : isolate_(isolate), |
| 56 any_ic_changed_(false) { | 61 any_ic_changed_(false) { |
| (...skipping 205 matching lines...) |
| 262 | 267 |
| 263 void RuntimeProfiler::MaybeBaselineIgnition(JSFunction* function) { | 268 void RuntimeProfiler::MaybeBaselineIgnition(JSFunction* function) { |
| 264 if (function->IsInOptimizationQueue()) return; | 269 if (function->IsInOptimizationQueue()) return; |
| 265 | 270 |
| 266 SharedFunctionInfo* shared = function->shared(); | 271 SharedFunctionInfo* shared = function->shared(); |
| 267 int ticks = shared->profiler_ticks(); | 272 int ticks = shared->profiler_ticks(); |
| 268 | 273 |
| 269 // TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller | 274 // TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller |
| 270 // than kMaxToplevelSourceSize. | 275 // than kMaxToplevelSourceSize. |
| 271 | 276 |
| 272 if (function->IsMarkedForBaseline() || function->IsMarkedForOptimization() || | 277 if (FLAG_ignition_osr && FLAG_always_osr) { |
| 273 function->IsMarkedForConcurrentOptimization() || | 278 AttemptOnStackReplacement(function, AbstractCode::kMaxLoopNestingMarker); |
| 274 function->IsOptimized()) { | 279 // Fall through and do a normal baseline compile as well. |
| 275 // TODO(rmcilroy): Support OSR in these cases. | 280 } else if (function->IsMarkedForBaseline() || |
| | 281 function->IsMarkedForOptimization() || |
| | 282 function->IsMarkedForConcurrentOptimization() || |
| | 283 function->IsOptimized()) { |
| | 284 // Attempt OSR if we are still running interpreted code even though the |
| | 285 // function has long been marked or even already been optimized. |
| | 286 int64_t allowance = |
| | 287 kOSRCodeSizeAllowanceBaseIgnition + |
| | 288 static_cast<int64_t>(ticks) * kOSRCodeSizeAllowancePerTickIgnition; |
| | 289 if (FLAG_ignition_osr && shared->HasBytecodeArray() && |
| | 290 shared->bytecode_array()->Size() <= allowance) { |
| | 291 AttemptOnStackReplacement(function); |
| | 292 } |
| 276 return; | 293 return; |
| 277 } | 294 } |
| 278 | 295 |
| 279 if (shared->optimization_disabled() && | 296 if (shared->optimization_disabled() && |
| 280 shared->disable_optimization_reason() == kOptimizationDisabledForTest) { | 297 shared->disable_optimization_reason() == kOptimizationDisabledForTest) { |
| 281 // Don't baseline functions which have been marked by NeverOptimizeFunction | 298 // Don't baseline functions which have been marked by NeverOptimizeFunction |
| 282 // in a test. | 299 // in a test. |
| 283 return; | 300 return; |
| 284 } | 301 } |
| 285 | 302 |
| 286 if (ticks >= kProfilerTicksBeforeBaseline) { | 303 if (ticks >= kProfilerTicksBeforeBaseline) { |
| 287 Baseline(function, "hot enough for baseline"); | 304 Baseline(function, "hot enough for baseline"); |
| 288 } | 305 } |
| 289 } | 306 } |
| 290 | 307 |
| 291 void RuntimeProfiler::MaybeOptimizeIgnition(JSFunction* function) { | 308 void RuntimeProfiler::MaybeOptimizeIgnition(JSFunction* function) { |
| 292 if (function->IsInOptimizationQueue()) return; | 309 if (function->IsInOptimizationQueue()) return; |
| 293 | 310 |
| 294 SharedFunctionInfo* shared = function->shared(); | 311 SharedFunctionInfo* shared = function->shared(); |
| 295 int ticks = shared->profiler_ticks(); | 312 int ticks = shared->profiler_ticks(); |
| 296 | 313 |
| 297 // TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller | 314 // TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller |
| 298 // than kMaxToplevelSourceSize. | 315 // than kMaxToplevelSourceSize. |
| 299 if (function->IsMarkedForBaseline() || function->IsMarkedForOptimization() || | 316 |
| 300 function->IsMarkedForConcurrentOptimization() || | 317 if (FLAG_ignition_osr && FLAG_always_osr) { |
| 301 function->IsOptimized()) { | 318 AttemptOnStackReplacement(function, AbstractCode::kMaxLoopNestingMarker); |
| 302 // TODO(rmcilroy): Support OSR in these cases. | 319 // Fall through and do a normal optimized compile as well. |
| | 320 } else if (function->IsMarkedForBaseline() || |
| | 321 function->IsMarkedForOptimization() || |
| | 322 function->IsMarkedForConcurrentOptimization() || |
| | 323 function->IsOptimized()) { |
| | 324 // Attempt OSR if we are still running interpreted code even though the |
| | 325 // function has long been marked or even already been optimized. |
| | 326 int64_t allowance = |
| | 327 kOSRCodeSizeAllowanceBaseIgnition + |
| | 328 static_cast<int64_t>(ticks) * kOSRCodeSizeAllowancePerTickIgnition; |
| | 329 if (FLAG_ignition_osr && shared->HasBytecodeArray() && |
| | 330 shared->bytecode_array()->Size() <= allowance) { |
| | 331 AttemptOnStackReplacement(function); |
| | 332 } |
| 303 return; | 333 return; |
| 304 } | 334 } |
| 305 | 335 |
| 306 if (shared->optimization_disabled()) { | 336 if (shared->optimization_disabled()) { |
| 307 if (shared->deopt_count() >= FLAG_max_opt_count) { | 337 if (shared->deopt_count() >= FLAG_max_opt_count) { |
| 308 // If optimization was disabled due to many deoptimizations, | 338 // If optimization was disabled due to many deoptimizations, |
| 309 // then check if the function is hot and try to reenable optimization. | 339 // then check if the function is hot and try to reenable optimization. |
| 310 if (ticks >= kProfilerTicksBeforeReenablingOptimization) { | 340 if (ticks >= kProfilerTicksBeforeReenablingOptimization) { |
| 311 shared->set_profiler_ticks(0); | 341 shared->set_profiler_ticks(0); |
| 312 shared->TryReenableOptimization(); | 342 shared->TryReenableOptimization(); |
| (...skipping 69 matching lines...) |
| 382 DCHECK_EQ(next_tier, Compiler::OPTIMIZED); | 412 DCHECK_EQ(next_tier, Compiler::OPTIMIZED); |
| 383 MaybeOptimizeFullCodegen(function, frame_count, frame->is_optimized()); | 413 MaybeOptimizeFullCodegen(function, frame_count, frame->is_optimized()); |
| 384 } | 414 } |
| 385 } | 415 } |
| 386 any_ic_changed_ = false; | 416 any_ic_changed_ = false; |
| 387 } | 417 } |
| 388 | 418 |
| 389 | 419 |
| 390 } // namespace internal | 420 } // namespace internal |
| 391 } // namespace v8 | 421 } // namespace v8 |
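
A note for readers tracing the new Ignition OSR path above: both `MaybeBaselineIgnition` and `MaybeOptimizeIgnition` now gate `AttemptOnStackReplacement` on a size allowance that grows linearly with the profiler tick count. A minimal standalone sketch of that check follows; the names and the placeholder multiplier are illustrative stand-ins, not V8's real identifiers.

```cpp
#include <cstdint>

// Stand-ins for the patch's Ignition OSR constants; the real values are
// derived from interpreter::Interpreter::kCodeSizeMultiplier.
static const int64_t kCodeSizeMultiplierSketch = 1;  // placeholder value
static const int64_t kOsrAllowanceBase = 100 * kCodeSizeMultiplierSketch;
static const int64_t kOsrAllowancePerTick = 4 * kCodeSizeMultiplierSketch;

// True when a function whose bytecode occupies `bytecode_size` bytes and
// which has accumulated `ticks` profiler ticks still fits the allowance,
// mirroring the `shared->bytecode_array()->Size() <= allowance` check above.
bool WithinIgnitionOsrAllowance(int64_t bytecode_size, int ticks) {
  int64_t allowance =
      kOsrAllowanceBase + static_cast<int64_t>(ticks) * kOsrAllowancePerTick;
  return bytecode_size <= allowance;
}
```

Widening to `int64_t` before the multiplication matches the patch and avoids any overflow concern as the tick count and multiplier grow.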
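
On the threshold constants near the top of the file: the `STATIC_ASSERT(... < 256)` checks exist because the tick count is stored in a single byte, so any threshold compared against it must be representable in that byte. A hypothetical illustration of the constraint, not V8's actual `SharedFunctionInfo` API:

```cpp
#include <cstdint>

// Hypothetical one-byte tick counter; V8 keeps the real count on
// SharedFunctionInfo, but the "must stay below 256" constraint is the same.
struct TickCounterSketch {
  uint8_t ticks = 0;

  void Tick() {
    if (ticks < 255) ++ticks;  // saturate rather than wrap back to zero
  }

  bool ReachedThreshold(int threshold) const {
    // A threshold of 256 or more could never be reached by a uint8_t,
    // which is what the STATIC_ASSERTs in the patch guard against.
    return ticks >= threshold;
  }
};
```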