Index: src/runtime-profiler.cc
diff --git a/src/runtime-profiler.cc b/src/runtime-profiler.cc
index f483c7d06c5fd7632aee1bdbf8c605d7cb1d9ca9..90764194f67f0a40c13ba8d8a1f35be8273b65f8 100644
--- a/src/runtime-profiler.cc
+++ b/src/runtime-profiler.cc
@@ -269,10 +269,23 @@ void RuntimeProfiler::MaybeBaselineIgnition(JSFunction* function) {
   // TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller
   // than kMaxToplevelSourceSize.
-  if (function->IsMarkedForBaseline() || function->IsMarkedForOptimization() ||
-      function->IsMarkedForConcurrentOptimization() ||
-      function->IsOptimized()) {
-    // TODO(rmcilroy): Support OSR in these cases.
+  if (FLAG_ignition_osr && FLAG_always_osr) {
+    AttemptOnStackReplacement(function, AbstractCode::kMaxLoopNestingMarker);
+    // Fall through and do a normal baseline compile as well.
+  } else if (function->IsMarkedForBaseline() ||
+             function->IsMarkedForOptimization() ||
+             function->IsMarkedForConcurrentOptimization() ||
+             function->IsOptimized()) {
+    // Attempt OSR if we are still running interpreted code even though
+    // the function has long been marked or even already been optimized.
+    int ticks = shared->profiler_ticks();
rmcilroy, 2016/07/28 16:19:21:
no need for this - just use ticks which is already

Michael Starzinger, 2016/07/28 17:57:24:
Done.

+    int64_t allowance =
+        kOSRCodeSizeAllowanceBase +
+        static_cast<int64_t>(ticks) * kOSRCodeSizeAllowancePerTick;
rmcilroy, 2016/07/28 16:19:21:
kOSRCodeSizeAllowanceBase and kOSRCodeSizeAllowanc

Michael Starzinger, 2016/07/28 17:57:24:
Done.

+    if (FLAG_ignition_osr && shared->HasBytecodeArray() &&
+        shared->bytecode_array()->Size() <= allowance) {
+      AttemptOnStackReplacement(function);
+    }
     return;
   }
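The thread above refers to the kOSRCodeSizeAllowanceBase and kOSRCodeSizeAllowancePerTick constants used by the new gate. As a rough, self-contained sketch of the size check this hunk introduces (the allowance values below are placeholders for illustration; the actual constants are defined elsewhere in runtime-profiler.cc and are not part of this diff):

#include <cstdint>

// Sketch only: these allowance values are placeholders, not the constants
// used in V8; they merely illustrate the shape of the check in the hunk above.
constexpr int64_t kOSRCodeSizeAllowanceBase = 10 * 1024;
constexpr int64_t kOSRCodeSizeAllowancePerTick = 2 * 1024;

// Returns true if bytecode of |bytecode_size| bytes is small enough to
// attempt OSR after |profiler_ticks| ticks. The allowance grows with each
// tick, so a function that keeps running interpreted code eventually
// qualifies even if its bytecode is fairly large.
bool WithinOSRCodeSizeAllowance(int profiler_ticks, int bytecode_size) {
  int64_t allowance =
      kOSRCodeSizeAllowanceBase +
      static_cast<int64_t>(profiler_ticks) * kOSRCodeSizeAllowancePerTick;
  return bytecode_size <= allowance;
}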
@@ -296,10 +309,24 @@ void RuntimeProfiler::MaybeOptimizeIgnition(JSFunction* function) {
   // TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller
   // than kMaxToplevelSourceSize.
-  if (function->IsMarkedForBaseline() || function->IsMarkedForOptimization() ||
-      function->IsMarkedForConcurrentOptimization() ||
-      function->IsOptimized()) {
-    // TODO(rmcilroy): Support OSR in these cases.
+
+  if (FLAG_ignition_osr && FLAG_always_osr) {
+    AttemptOnStackReplacement(function, AbstractCode::kMaxLoopNestingMarker);
+    // Fall through and do a normal optimized compile as well.
+  } else if (function->IsMarkedForBaseline() ||
+             function->IsMarkedForOptimization() ||
+             function->IsMarkedForConcurrentOptimization() ||
+             function->IsOptimized()) {
+    // Attempt OSR if we are still running interpreted code even though
+    // the function has long been marked or even already been optimized.
+    int ticks = shared->profiler_ticks();
+    int64_t allowance =
+        kOSRCodeSizeAllowanceBase +
+        static_cast<int64_t>(ticks) * kOSRCodeSizeAllowancePerTick;
+    if (FLAG_ignition_osr && shared->HasBytecodeArray() &&
+        shared->bytecode_array()->Size() <= allowance) {
+      AttemptOnStackReplacement(function);
+    }
     return;
   }
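The second hunk applies the same gate in MaybeOptimizeIgnition, differing only in that the fall-through path leads to an optimized rather than a baseline compile. A minimal sketch of the decision flow shared by both hunks, assuming the enum and helper names below are purely illustrative and not part of the change:

// Sketch only: names below are illustrative, not from the V8 change.
enum class OsrAction {
  kForceOsrThenCompile,  // --always-osr: OSR at any loop depth, then compile.
  kMaybeOsrThenReturn,   // Already marked/optimized: OSR only if small enough.
  kCompileOnly           // Not yet marked: proceed with the normal decision.
};

OsrAction DecideOsrAction(bool ignition_osr, bool always_osr,
                          bool marked_or_optimized) {
  if (ignition_osr && always_osr) {
    // Attempt OSR unconditionally, then fall through to a normal compile.
    return OsrAction::kForceOsrThenCompile;
  }
  if (marked_or_optimized) {
    // Still interpreting although the function has long been marked (or is
    // already optimized): attempt OSR if the bytecode fits the tick-scaled
    // allowance, then skip the normal tiering decision for this tick.
    return OsrAction::kMaybeOsrThenReturn;
  }
  return OsrAction::kCompileOnly;
}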