Index: src/runtime-profiler.cc
diff --git a/src/runtime-profiler.cc b/src/runtime-profiler.cc
index 6c9de86ef309dfe96771e2d61fca33d74b7f82d3..1b571a742925fbda610b2ae2328433a67ba55c67 100644
--- a/src/runtime-profiler.cc
+++ b/src/runtime-profiler.cc
@@ -20,6 +20,9 @@ namespace internal {
 
 
 // Number of times a function has to be seen on the stack before it is
+// compiled for baseline.
+static const int kProfilerTicksBeforeBaseline = 2;
+// Number of times a function has to be seen on the stack before it is
 // optimized.
 static const int kProfilerTicksBeforeOptimization = 2;
 // If the function optimization was disabled due to high deoptimization count,
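Note: this hunk gives interpreter-to-baseline tier-up its own tick threshold instead of reusing kProfilerTicksBeforeOptimization. A minimal sketch of how such per-tier thresholds drive the tier-up decision (not V8 code; DecideTierUp and its parameters are hypothetical, the constants mirror the ones defined above, and the bytecode-array test mirrors the DCHECKs introduced further down):

    // Sketch only: per-tier tick thresholds gating tier-up.
    static const int kProfilerTicksBeforeBaseline = 2;
    static const int kProfilerTicksBeforeOptimization = 2;

    enum class TierUp { kNone, kBaseline, kOptimize };

    TierUp DecideTierUp(int profiler_ticks, bool has_bytecode_array) {
      if (has_bytecode_array) {
        // Still running in the interpreter; the next tier is baseline code.
        return profiler_ticks >= kProfilerTicksBeforeBaseline
                   ? TierUp::kBaseline
                   : TierUp::kNone;
      }
      // Already in baseline (or full-codegen) code; the next tier is optimized.
      return profiler_ticks >= kProfilerTicksBeforeOptimization
                 ? TierUp::kOptimize
                 : TierUp::kNone;
    }
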
@@ -88,13 +91,13 @@ static void GetICCounts(SharedFunctionInfo* shared,
   }
 }
 
-
-void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) {
+static void TraceRecompile(JSFunction* function, const char* reason,
+                           const char* type) {
   if (FLAG_trace_opt &&
       function->shared()->PassesFilter(FLAG_hydrogen_filter)) {
     PrintF("[marking ");
     function->ShortPrint();
-    PrintF(" for recompilation, reason: %s", reason);
+    PrintF(" for %s recompilation, reason: %s", type, reason);
     if (FLAG_type_info_threshold > 0) {
       int typeinfo, generic, total, type_percentage, generic_percentage;
       GetICCounts(function->shared(), &typeinfo, &generic, &total,
@@ -105,14 +108,27 @@ void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) {
     }
     PrintF("]\n");
   }
+}
 
-  if (function->shared()->HasBytecodeArray()) {
-    function->MarkForBaseline();
-  } else {
-    function->AttemptConcurrentOptimization();
-  }
+void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) {
+  TraceRecompile(function, reason, "optimized");
+
+  // TODO(4280): Fix this to check function is compiled to baseline once we
+  // have a standard way to check that. For now, baseline code doesn't have
+  // a bytecode array.
+  DCHECK(!function->shared()->HasBytecodeArray());
+  function->AttemptConcurrentOptimization();
 }
 
+void RuntimeProfiler::Baseline(JSFunction* function, const char* reason) {
+  TraceRecompile(function, reason, "baseline");
+
+  // TODO(4280): Fix this to check function is compiled for the interpreter
+  // once we have a standard way to check that. For now function will only
+  // have a bytecode array if compiled for the interpreter.
+  DCHECK(function->shared()->HasBytecodeArray());
+  function->MarkForBaseline();
+}
 
 void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function,
                                                 int loop_nesting_levels) {
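Note: with the tracing factored into TraceRecompile, --trace-opt now tags each marking line with the target tier. Judging from the PrintF calls above, a trace line should look roughly like this (address and function name are invented, ShortPrint's exact format may differ, and the IC stats are only appended when FLAG_type_info_threshold > 0):

    [marking 0x3af7c8d5 <JSFunction fib> for baseline recompilation, reason: hot enough for baseline]
    [marking 0x3af7c8d5 <JSFunction fib> for optimized recompilation, reason: hot and stable]
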
@@ -239,8 +255,7 @@ void RuntimeProfiler::MaybeOptimizeFullCodegen(JSFunction* function,
   }
 }
 
-void RuntimeProfiler::MaybeOptimizeIgnition(JSFunction* function,
-                                            bool frame_optimized) {
+void RuntimeProfiler::MaybeOptimizeIgnition(JSFunction* function) {
   if (function->IsInOptimizationQueue()) return;
 
   SharedFunctionInfo* shared = function->shared();
@@ -251,49 +266,22 @@ void RuntimeProfiler::MaybeOptimizeIgnition(JSFunction* function) {
 
   // TODO(rmcilroy): Consider whether we should optimize small functions when
   // they are first seen on the stack (e.g., kMaxSizeEarlyOpt).
-  if (!frame_optimized && (function->IsMarkedForBaseline() ||
-                           function->IsMarkedForOptimization() ||
-                           function->IsMarkedForConcurrentOptimization() ||
-                           function->IsOptimized())) {
+  if (function->IsMarkedForBaseline() || function->IsMarkedForOptimization() ||
+      function->IsMarkedForConcurrentOptimization() ||
+      function->IsOptimized()) {
     // TODO(rmcilroy): Support OSR in these cases.
-
     return;
   }
 
-  // Do not optimize non-optimizable functions.
-  if (shared->optimization_disabled()) {
-    if (shared->deopt_count() >= FLAG_max_opt_count) {
-      // If optimization was disabled due to many deoptimizations,
-      // then check if the function is hot and try to reenable optimization.
-      if (ticks >= kProfilerTicksBeforeReenablingOptimization) {
-        shared->set_profiler_ticks(0);
-        shared->TryReenableOptimization();
-      }
-    }
+  if (shared->optimization_disabled() &&
+      shared->disable_optimization_reason() == kOptimizationDisabledForTest) {
+    // Don't baseline functions which have been marked by NeverOptimizeFunction
+    // in a test.
     return;
   }
 
-  if (function->IsOptimized()) return;
-
-  if (ticks >= kProfilerTicksBeforeOptimization) {
-    int typeinfo, generic, total, type_percentage, generic_percentage;
-    GetICCounts(shared, &typeinfo, &generic, &total, &type_percentage,
-                &generic_percentage);
-    if (type_percentage >= FLAG_type_info_threshold &&
-        generic_percentage <= FLAG_generic_ic_threshold) {
-      // If this particular function hasn't had any ICs patched for enough
-      // ticks, optimize it now.
-      Optimize(function, "hot and stable");
-    } else if (ticks >= kTicksWhenNotEnoughTypeInfo) {
-      Optimize(function, "not much type info but very hot");
-    } else {
-      if (FLAG_trace_opt_verbose) {
-        PrintF("[not yet optimizing ");
-        function->PrintName();
-        PrintF(", not enough type info: %d/%d (%d%%)]\n", typeinfo, total,
-               type_percentage);
-      }
-    }
+  if (ticks >= kProfilerTicksBeforeBaseline) {
+    Baseline(function, "hot enough for baseline");
   }
 }
 
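Note: after this hunk the interpreter path no longer consults IC type-info percentages or the deopt-count/reenable logic; those heuristics remain in the full-codegen path (MaybeOptimizeFullCodegen), and interpreted functions now simply tier up to baseline once hot. A condensed restatement of the new control flow, with the predicate chains above collapsed into hypothetical helpers:

    static const int kProfilerTicksBeforeBaseline = 2;

    // Stand-ins for the predicate chains in the hunk above.
    bool IsMarkedOrAlreadyOptimized();   // IsMarkedForBaseline() || ... || IsOptimized()
    bool OptimizationDisabledForTest();  // reason == kOptimizationDisabledForTest
    void Baseline(const char* reason);

    void MaybeOptimizeIgnition(int ticks) {
      if (IsMarkedOrAlreadyOptimized()) return;   // TODO: support OSR here.
      if (OptimizationDisabledForTest()) return;  // %NeverOptimizeFunction in a test.
      if (ticks >= kProfilerTicksBeforeBaseline) {
        Baseline("hot enough for baseline");
      }
    }
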
@@ -326,7 +314,8 @@ void RuntimeProfiler::MarkCandidatesForOptimization() {
     }
 
     if (frame->is_interpreted()) {
-      MaybeOptimizeIgnition(function, frame->is_optimized());
+      DCHECK(!frame->is_optimized());
+      MaybeOptimizeIgnition(function);
     } else {
       MaybeOptimizeFullCodegen(function, frame_count, frame->is_optimized());
     }
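
Note: the DCHECKs added by this patch (here and in Optimize/Baseline above) document invariants in debug builds only; V8's DCHECK, defined in src/base/logging.h, compiles away in release builds, so these checks cost nothing in shipping configurations. A minimal sketch of the idiom (the real macro also reports file and line and routes through V8's fatal-error handler):

    #include <cstdio>
    #include <cstdlib>

    #ifdef DEBUG
    #define DCHECK(condition)                                           \
      do {                                                              \
        if (!(condition)) {                                             \
          std::fprintf(stderr, "Debug check failed: %s\n", #condition); \
          std::abort();                                                 \
        }                                                               \
      } while (false)
    #else
    #define DCHECK(condition) ((void)0)
    #endif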