Chromium Code Reviews| Index: src/runtime-profiler.cc |
| diff --git a/src/runtime-profiler.cc b/src/runtime-profiler.cc |
| index df94ddfa2e687e139addb1bed79cbfe598777b77..6aaf61bbea33f43d4c42bcd840bd1605278af4cc 100644 |
| --- a/src/runtime-profiler.cc |
| +++ b/src/runtime-profiler.cc |
| @@ -55,6 +55,41 @@ static const int kOSRCodeSizeAllowancePerTickIgnition = |
| static const int kMaxSizeEarlyOpt = |
| 5 * FullCodeGenerator::kCodeSizeMultiplier; |
| +#define OPTIMIZATION_REASON_LIST(V) \ |
| + V(DoNotOptimize, "do not optimize") \ |
| + V(HotAndStable, "hot and stable") \ |
| + V(HotEnoughForBaseline, "hot enough for baseline") \ |
| + V(HotWithoutMuchTypeInfo, "not much type info but very hot") \ |
| + V(SmallFunction, "small function") |
| + |
| +enum class OptimizationReason : uint8_t { |
| +#define OPTIMIZATION_REASON_CONSTANTS(Constant, message) k##Constant, |
| + OPTIMIZATION_REASON_LIST(OPTIMIZATION_REASON_CONSTANTS) |
| +#undef OPTIMIZATION_REASON_CONSTANTS |
| +}; |
| + |
| +char const* OptimizationReasonToString(OptimizationReason reason) { |
| + static char const* reasons[] = { |
| +#define OPTIMIZATION_REASON_TEXTS(Constant, message) message, |
| + OPTIMIZATION_REASON_LIST(OPTIMIZATION_REASON_TEXTS) |
| +#undef OPTIMIZATION_REASON_TEXTS |
| + }; |
| + size_t const index = static_cast<size_t>(reason); |
| + DCHECK_LT(index, arraysize(reasons)); |
| + return reasons[index]; |
| +} |
| + |
| +std::ostream& operator<<(std::ostream& os, OptimizationReason reason) { |
| + switch (reason) { |
|
rmcilroy
2016/09/23 08:10:34
Just call OptimizationReasonToString rather than adding another switch statement here.
klaasb
2016/09/23 08:31:39
Done.
|
| +#define OPTIMIZATION_REASON_NAMES(Constant, message) \ |
| + case OptimizationReason::k##Constant: \ |
| + return os << #Constant; |
| + OPTIMIZATION_REASON_LIST(OPTIMIZATION_REASON_NAMES) |
| +#undef OPTIMIZATION_REASON_NAMES |
| + } |
| + UNREACHABLE(); |
| + return os; |
| +} |
| RuntimeProfiler::RuntimeProfiler(Isolate* isolate) |
| : isolate_(isolate), |
| @@ -80,11 +115,14 @@ static void GetICCounts(JSFunction* function, int* ic_with_type_info_count, |
| // Harvest vector-ics as well |
| TypeFeedbackVector* vector = function->feedback_vector(); |
| - int with = 0, gen = 0; |
| + int with = 0, gen = 0, interpreter_total = 0; |
|
rmcilroy
2016/09/23 08:10:34
nit - rename this to type_vector_ic_count
klaasb
2016/09/23 08:31:40
Done.
|
| const bool is_interpreted = |
| function->shared()->code()->is_interpreter_trampoline_builtin(); |
| - vector->ComputeCounts(&with, &gen, is_interpreted); |
| + vector->ComputeCounts(&with, &gen, &interpreter_total, is_interpreted); |
| + if (is_interpreted) { |
| + *ic_total_count = interpreter_total; |
|
rmcilroy
2016/09/23 08:10:34
DCHECK that ic_total_count is zero beforehand
klaasb
2016/09/23 08:31:40
Done.
|
| + } |
| *ic_with_type_info_count += with; |
| *ic_generic_count += gen; |
| @@ -116,13 +154,17 @@ static void TraceRecompile(JSFunction* function, const char* reason, |
| } |
| } |
| -void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) { |
| - TraceRecompile(function, reason, "optimized"); |
| +void RuntimeProfiler::Optimize(JSFunction* function, |
| + OptimizationReason reason) { |
| + DCHECK_NE(reason, OptimizationReason::kDoNotOptimize); |
| + TraceRecompile(function, OptimizationReasonToString(reason), "optimized"); |
| function->AttemptConcurrentOptimization(); |
| } |
| -void RuntimeProfiler::Baseline(JSFunction* function, const char* reason) { |
| - TraceRecompile(function, reason, "baseline"); |
| +void RuntimeProfiler::Baseline(JSFunction* function, |
| + OptimizationReason reason) { |
| + DCHECK_NE(reason, OptimizationReason::kDoNotOptimize); |
| + TraceRecompile(function, OptimizationReasonToString(reason), "baseline"); |
| // TODO(4280): Fix this to check function is compiled for the interpreter |
| // once we have a standard way to check that. For now function will only |
| @@ -241,9 +283,9 @@ void RuntimeProfiler::MaybeOptimizeFullCodegen(JSFunction* function, |
| generic_percentage <= FLAG_generic_ic_threshold) { |
| // If this particular function hasn't had any ICs patched for enough |
| // ticks, optimize it now. |
| - Optimize(function, "hot and stable"); |
| + Optimize(function, OptimizationReason::kHotAndStable); |
| } else if (ticks >= kTicksWhenNotEnoughTypeInfo) { |
| - Optimize(function, "not much type info but very hot"); |
| + Optimize(function, OptimizationReason::kHotWithoutMuchTypeInfo); |
| } else { |
| shared_code->set_profiler_ticks(ticks + 1); |
| if (FLAG_trace_opt_verbose) { |
| @@ -262,7 +304,7 @@ void RuntimeProfiler::MaybeOptimizeFullCodegen(JSFunction* function, |
| &generic_percentage); |
| if (type_percentage >= FLAG_type_info_threshold && |
| generic_percentage <= FLAG_generic_ic_threshold) { |
| - Optimize(function, "small function"); |
| + Optimize(function, OptimizationReason::kSmallFunction); |
| } else { |
| shared_code->set_profiler_ticks(ticks + 1); |
| } |
| @@ -275,31 +317,11 @@ void RuntimeProfiler::MaybeBaselineIgnition(JSFunction* function, |
| JavaScriptFrame* frame) { |
| if (function->IsInOptimizationQueue()) return; |
| + if (MaybeOSRIgnition(function, frame)) return; |
| + |
| SharedFunctionInfo* shared = function->shared(); |
| int ticks = shared->profiler_ticks(); |
| - // TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller |
| - // than kMaxToplevelSourceSize. |
| - |
| - if (FLAG_always_osr) { |
| - AttemptOnStackReplacement(frame, AbstractCode::kMaxLoopNestingMarker); |
| - // Fall through and do a normal baseline compile as well. |
| - } else if (!frame->is_optimized() && |
| - (function->IsMarkedForBaseline() || |
| - function->IsMarkedForOptimization() || |
| - function->IsMarkedForConcurrentOptimization() || |
| - function->IsOptimized())) { |
| - // Attempt OSR if we are still running interpreted code even though the |
| - // the function has long been marked or even already been optimized. |
| - int64_t allowance = |
| - kOSRCodeSizeAllowanceBaseIgnition + |
| - static_cast<int64_t>(ticks) * kOSRCodeSizeAllowancePerTickIgnition; |
| - if (shared->bytecode_array()->Size() <= allowance) { |
| - AttemptOnStackReplacement(frame); |
| - } |
| - return; |
| - } |
| - |
| if (shared->optimization_disabled() && |
| shared->disable_optimization_reason() == kOptimizationDisabledForTest) { |
| // Don't baseline functions which have been marked by NeverOptimizeFunction |
| @@ -308,7 +330,7 @@ void RuntimeProfiler::MaybeBaselineIgnition(JSFunction* function, |
| } |
| if (ticks >= kProfilerTicksBeforeBaseline) { |
| - Baseline(function, "hot enough for baseline"); |
| + Baseline(function, OptimizationReason::kHotEnoughForBaseline); |
| } |
| } |
| @@ -316,6 +338,36 @@ void RuntimeProfiler::MaybeOptimizeIgnition(JSFunction* function, |
| JavaScriptFrame* frame) { |
| if (function->IsInOptimizationQueue()) return; |
| + if (MaybeOSRIgnition(function, frame)) return; |
| + |
| + SharedFunctionInfo* shared = function->shared(); |
| + int ticks = shared->profiler_ticks(); |
| + |
| + if (shared->optimization_disabled()) { |
| + if (shared->deopt_count() >= FLAG_max_opt_count) { |
| + // If optimization was disabled due to many deoptimizations, |
| + // then check if the function is hot and try to reenable optimization. |
| + if (ticks >= kProfilerTicksBeforeReenablingOptimization) { |
| + shared->set_profiler_ticks(0); |
| + shared->TryReenableOptimization(); |
| + } |
| + } |
| + return; |
| + } |
| + |
| + if (function->IsOptimized()) return; |
| + |
| + auto reason = OptimizationHeuristicIgnition(function, frame); |
|
rmcilroy
2016/09/23 08:10:34
Don't use auto unless it improves readability (https://google.github.io/styleguide/cppguide.html#auto)
klaasb
2016/09/23 08:31:40
Done.
|
| + |
| + if (reason != OptimizationReason::kDoNotOptimize) { |
| + Optimize(function, reason); |
| + } |
| +} |
| + |
| +bool RuntimeProfiler::MaybeOSRIgnition(JSFunction* function, |
| + JavaScriptFrame* frame) { |
| + if (!FLAG_ignition_osr) return false; |
| + |
| SharedFunctionInfo* shared = function->shared(); |
| int ticks = shared->profiler_ticks(); |
| @@ -324,9 +376,12 @@ void RuntimeProfiler::MaybeOptimizeIgnition(JSFunction* function, |
| if (FLAG_always_osr) { |
| AttemptOnStackReplacement(frame, AbstractCode::kMaxLoopNestingMarker); |
| - // Fall through and do a normal optimized compile as well. |
| + // Do a normal baseline/optimized compile as well. |
| + return false; |
|
rmcilroy
2016/09/23 08:10:34
Hmm, not keen on this. Could you just pull out the always-OSR handling so this early return isn't needed?
klaasb
2016/09/23 08:31:40
Done.
|
| } else if (!frame->is_optimized() && |
| - (function->IsMarkedForBaseline() || |
| + ((function->IsMarkedForBaseline() && |
| + OptimizationHeuristicIgnition(function, frame) != |
| + OptimizationReason::kDoNotOptimize) || |
|
rmcilroy
2016/09/23 08:10:34
Could you pull this out to a separate local for clarity?
klaasb
2016/09/23 08:31:40
Done.
|
| function->IsMarkedForOptimization() || |
| function->IsMarkedForConcurrentOptimization() || |
| function->IsOptimized())) { |
| @@ -338,21 +393,15 @@ void RuntimeProfiler::MaybeOptimizeIgnition(JSFunction* function, |
| if (shared->bytecode_array()->Size() <= allowance) { |
| AttemptOnStackReplacement(frame); |
| } |
| - return; |
| + return true; |
| } |
| + return false; |
| +} |
| - if (shared->optimization_disabled()) { |
| - if (shared->deopt_count() >= FLAG_max_opt_count) { |
| - // If optimization was disabled due to many deoptimizations, |
| - // then check if the function is hot and try to reenable optimization. |
| - if (ticks >= kProfilerTicksBeforeReenablingOptimization) { |
| - shared->set_profiler_ticks(0); |
| - shared->TryReenableOptimization(); |
| - } |
| - } |
| - return; |
| - } |
| - if (function->IsOptimized()) return; |
| +OptimizationReason RuntimeProfiler::OptimizationHeuristicIgnition( |
| + JSFunction* function, JavaScriptFrame* frame) { |
| + SharedFunctionInfo* shared = function->shared(); |
| + int ticks = shared->profiler_ticks(); |
| if (ticks >= kProfilerTicksBeforeOptimization) { |
| int typeinfo, generic, total, type_percentage, generic_percentage; |
| @@ -362,9 +411,9 @@ void RuntimeProfiler::MaybeOptimizeIgnition(JSFunction* function, |
| generic_percentage <= FLAG_generic_ic_threshold) { |
| // If this particular function hasn't had any ICs patched for enough |
| // ticks, optimize it now. |
| - Optimize(function, "hot and stable"); |
| + return OptimizationReason::kHotAndStable; |
| } else if (ticks >= kTicksWhenNotEnoughTypeInfo) { |
| - Optimize(function, "not much type info but very hot"); |
| + return OptimizationReason::kHotWithoutMuchTypeInfo; |
| } else { |
| if (FLAG_trace_opt_verbose) { |
| PrintF("[not yet optimizing "); |
| @@ -372,10 +421,12 @@ void RuntimeProfiler::MaybeOptimizeIgnition(JSFunction* function, |
| PrintF(", not enough type info: %d/%d (%d%%)]\n", typeinfo, total, |
| type_percentage); |
| } |
| + return OptimizationReason::kDoNotOptimize; |
| } |
| } |
| // TODO(rmcilroy): Consider whether we should optimize small functions when |
| // they are first seen on the stack (e.g., kMaxSizeEarlyOpt). |
| + return OptimizationReason::kDoNotOptimize; |
| } |
| void RuntimeProfiler::MarkCandidatesForOptimization() { |
| @@ -423,6 +474,5 @@ void RuntimeProfiler::MarkCandidatesForOptimization() { |
| any_ic_changed_ = false; |
| } |
| - |
| } // namespace internal |
| } // namespace v8 |