Index: src/runtime-profiler.cc
diff --git a/src/runtime-profiler.cc b/src/runtime-profiler.cc
index 9b7dd34ccd1d61c6fdd6075cdd4de61c24c4ffd1..00f7de476061db3529e256a168df618ec528a484 100644
--- a/src/runtime-profiler.cc
+++ b/src/runtime-profiler.cc
@@ -153,9 +153,6 @@ void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) {
 void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function) {
   // See AlwaysFullCompiler (in compiler.cc) comment on why we need
   // Debug::has_break_points().
-  ASSERT(function->IsMarkedForLazyRecompilation() ||
-         function->IsMarkedForParallelRecompilation() ||
-         function->IsOptimized());
   if (!FLAG_use_osr ||
       isolate_->DebuggerHasBreakPoints() ||
       function->IsBuiltin()) {
@@ -273,12 +270,21 @@ void RuntimeProfiler::OptimizeNow() {
     if (shared_code->kind() != Code::FUNCTION) continue;
     if (function->IsInRecompileQueue()) continue;
-    // Attempt OSR if we are still running unoptimized code even though the
-    // the function has long been marked or even already been optimized.
-    if (!frame->is_optimized() &&
+    if (FLAG_always_osr &&
+        shared_code->allow_osr_at_loop_nesting_level() == 0) {
+      // Testing mode: always try an OSR compile for every function.
+      for (int i = 0; i < Code::kMaxLoopNestingMarker; i++) {
+        // TODO(titzer): fix AttemptOnStackReplacement to avoid this dumb loop.
+        shared_code->set_allow_osr_at_loop_nesting_level(i);
+        AttemptOnStackReplacement(function);
+      }
+      // Fall through and do a normal optimized compile as well.
+    } else if (!frame->is_optimized() &&
         (function->IsMarkedForLazyRecompilation() ||
          function->IsMarkedForParallelRecompilation() ||
          function->IsOptimized())) {
+      // Attempt OSR if we are still running unoptimized code even though
+      // the function has long been marked or even already been optimized.
       int ticks = shared_code->profiler_ticks();
       int allowance = kOSRCodeSizeAllowanceBase +
                       ticks * kOSRCodeSizeAllowancePerTick;