Chromium Code Reviews| Index: src/compiler.cc |
| diff --git a/src/compiler.cc b/src/compiler.cc |
| index 1fba20fa5b923790d9ac720a69f68f2da952157c..14b1bb777ced151acde3340d97d0c1c46384385f 100644 |
| --- a/src/compiler.cc |
| +++ b/src/compiler.cc |
| @@ -963,8 +963,10 @@ bool Compiler::CompileLazy(CompilationInfo* info) { |
| } |
| -void Compiler::RecompileConcurrent(Handle<JSFunction> closure) { |
| - ASSERT(closure->IsMarkedForConcurrentRecompilation()); |
| +void Compiler::RecompileConcurrent(Handle<JSFunction> closure, |
| + uint32_t osr_pc_offset) { |
| + ASSERT(FLAG_concurrent_recompilation); |
|
titzer
2013/09/02 17:03:35
Don't assert flags; they should only be used to turn features on and off.
|
| + ASSERT(osr_pc_offset != 0 || closure->IsMarkedForConcurrentRecompilation()); |
| Isolate* isolate = closure->GetIsolate(); |
| // Here we prepare compile data for the concurrent recompilation thread, but |
| @@ -982,13 +984,31 @@ void Compiler::RecompileConcurrent(Handle<JSFunction> closure) { |
| } |
| SmartPointer<CompilationInfo> info(new CompilationInfoWithZone(closure)); |
| + Handle<SharedFunctionInfo> shared = info->shared_info(); |
| + |
| + if (osr_pc_offset != 0) { |
| + ASSERT(FLAG_speculative_concurrent_osr); |
| + // Translate pc offset into AST id. |
| + DisallowHeapAllocation no_gc; |
| + FullCodeGenerator::BackEdgeTableIterator back_edges(shared->code(), &no_gc); |
|
titzer
2013/09/02 17:03:35
It seems weird to make a BackEdgeTableIterator and then do a linear search through it for the pc offset.
Yang
2013/09/03 08:50:10
Done.
|
| + if (!back_edges.FindPcOffset(osr_pc_offset)) UNREACHABLE(); |
| + info->SetOptimizing(back_edges.ast_id()); |
| + |
| + if (FLAG_trace_osr) { |
| + PrintF("[COSR - attempt to queue "); |
| + closure->PrintName(); |
| + PrintF(" for concurrent compilation at AST id %d, loop depth %d]\n", |
| + back_edges.ast_id().ToInt(), back_edges.loop_depth()); |
| + } |
| + } else { |
| + info->SetOptimizing(BailoutId::None()); |
| + } |
| + |
| VMState<COMPILER> state(isolate); |
| PostponeInterruptsScope postpone(isolate); |
| - Handle<SharedFunctionInfo> shared = info->shared_info(); |
| int compiled_size = shared->end_position() - shared->start_position(); |
| isolate->counters()->total_compile_size()->Increment(compiled_size); |
| - info->SetOptimizing(BailoutId::None()); |
| { |
| CompilationHandleScope handle_scope(*info); |
| @@ -1005,7 +1025,7 @@ void Compiler::RecompileConcurrent(Handle<JSFunction> closure) { |
| if (Rewriter::Rewrite(*info) && Scope::Analyze(*info)) { |
| OptimizingCompiler* compiler = |
| - new(info->zone()) OptimizingCompiler(*info); |
| + new(info->zone()) OptimizingCompiler(*info, osr_pc_offset); |
| OptimizingCompiler::Status status = compiler->CreateGraph(); |
| if (status == OptimizingCompiler::SUCCEEDED) { |
| info.Detach(); |
| @@ -1019,11 +1039,10 @@ void Compiler::RecompileConcurrent(Handle<JSFunction> closure) { |
| } |
| } |
| - if (shared->code()->back_edges_patched_for_osr()) { |
| - // At this point we either put the function on recompilation queue or |
| - // aborted optimization. In either case we want to continue executing |
| - // the unoptimized code without running into OSR. If the unoptimized |
| - // code has been patched for OSR, unpatch it. |
| + // If we don't compile for on-stack replacement in the background thread, |
| + // reset the OSR attempt to avoid recompilation being preempted by OSR. |
| + if (!FLAG_speculative_concurrent_osr && |
| + shared->code()->back_edges_patched_for_osr()) { |
| Deoptimizer::RevertInterruptCode(isolate, shared->code()); |
| } |
| @@ -1094,6 +1113,173 @@ void Compiler::InstallOptimizedCode(OptimizingCompiler* optimizing_compiler) { |
| } |
| +static uint32_t CurrentPcOffset(Isolate* isolate, |
|
titzer
2013/09/02 17:03:35
Why are you doing a search for the PC of the top frame here?
Yang
2013/09/03 08:50:10
Already thought of that. Let me work on that on a separate CL.
|
| + Handle<JSFunction> function, |
| + Handle<Code> unoptimized) { |
| + JavaScriptFrameIterator it(isolate); |
| + JavaScriptFrame* frame = it.frame(); |
| + ASSERT(frame->function() == *function); |
| + ASSERT(frame->LookupCode() == *unoptimized); |
| + ASSERT(unoptimized->contains(frame->pc())); |
| + |
| + // Use linear search of the unoptimized code's back edge table to find |
| + // the AST id matching the PC. |
| + return static_cast<uint32_t>(frame->pc() - unoptimized->instruction_start()); |
| +} |
| + |
| + |
| +static bool IsSuitableForOnStackReplacement(Isolate* isolate, |
| + Handle<JSFunction> function, |
| + Handle<Code> unoptimized) { |
| + // Keep track of whether we've succeeded in optimizing. |
| + if (!unoptimized->optimizable()) return false; |
| + // If we are trying to do OSR when there are already optimized |
| + // activations of the function, it means (a) the function is directly or |
| + // indirectly recursive and (b) an optimized invocation has been |
| + // deoptimized so that we are currently in an unoptimized activation. |
| + // Check for optimized activations of this function. |
| + for (JavaScriptFrameIterator it(isolate); !it.done(); it.Advance()) { |
| + JavaScriptFrame* frame = it.frame(); |
| + if (frame->is_optimized() && frame->function() == *function) return false; |
| + } |
| + |
| + return true; |
| +} |
| + |
| + |
| +bool Compiler::CompileForOnStackReplacement(Handle<JSFunction> function, |
| + BailoutId* ast_id) { |
| + Isolate* isolate = function->GetIsolate(); |
| + // We have hit a back edge in an unoptimized frame for a function that was |
| + // selected for on-stack replacement. Find the unoptimized code object. |
| + Handle<Code> unoptimized(function->shared()->code(), isolate); |
| + |
| + Deoptimizer::RevertInterruptCode(isolate, *unoptimized); |
| + if (FLAG_trace_osr) { |
| + PrintF("[OSR - restored original interrupt calls in "); |
| + function->PrintName(); |
| + PrintF("]\n"); |
| + } |
| + |
| + if (!IsSuitableForOnStackReplacement(isolate, function, unoptimized)) { |
| + return false; |
| + } |
| + |
| + uint32_t pc_offset = CurrentPcOffset(isolate, function, unoptimized); |
| + |
| + { DisallowHeapAllocation no_gc; |
| + FullCodeGenerator::BackEdgeTableIterator back_edges(*unoptimized, &no_gc); |
| + if (!back_edges.FindPcOffset(pc_offset)) UNREACHABLE(); |
| + *ast_id = back_edges.ast_id(); |
| + |
| + if (FLAG_trace_osr) { |
| + PrintF("[OSR - replacing at AST id %d, loop depth %d in ", |
| + ast_id->ToInt(), back_edges.loop_depth()); |
| + function->PrintName(); |
| + PrintF("]\n"); |
| + } |
| + } |
| + |
| + // Try to compile the optimized code. A true return value from |
| + // CompileOptimized means that compilation succeeded, not necessarily |
| + // that optimization succeeded. |
| + if (JSFunction::CompileOptimized(function, *ast_id, CLEAR_EXCEPTION) && |
| + function->IsOptimized()) { |
| + DeoptimizationInputData* data = DeoptimizationInputData::cast( |
| + function->code()->deoptimization_data()); |
| + if (data->OsrPcOffset()->value() >= 0) { |
| + if (FLAG_trace_osr) { |
| + PrintF("[OSR - entry, offset %d in optimized code]\n", |
| + data->OsrPcOffset()->value()); |
| + } |
| + ASSERT(BailoutId(data->OsrAstId()->value()) == *ast_id); |
| + return true; |
| + } |
| + } else { |
| + if (FLAG_trace_osr) { |
| + PrintF("[OSR - optimization failed for "); |
| + function->PrintName(); |
| + PrintF("]\n"); |
| + } |
| + } |
| + return false; |
| +} |
| + |
| + |
| +bool Compiler::CompileForConcurrentOSR(Handle<JSFunction> function, |
| + BailoutId* ast_id) { |
| + ASSERT(FLAG_concurrent_recompilation && FLAG_speculative_concurrent_osr); |
| + |
| + Isolate* isolate = function->GetIsolate(); |
| + Handle<Code> unoptimized(function->shared()->code(), isolate); |
| + |
| + uint32_t pc_offset = CurrentPcOffset(isolate, function, unoptimized); |
| + |
| + if (isolate->optimizing_compiler_thread()-> |
| + IsQueuedForOSR(function, pc_offset)) { |
| + // Still waiting for the optimizing compiler thread to finish. Carry on. |
| + if (FLAG_trace_osr) { |
| + PrintF("[COSR - polling recompile tasks for "); |
| + function->PrintName(); |
| + PrintF("]\n"); |
| + } |
| + return false; |
| + } |
| + |
| + OptimizingCompiler* compiler = isolate->optimizing_compiler_thread()-> |
| + FindReadyOSRCandidate(function, pc_offset); |
| + |
| + if (compiler != NULL) { |
| + if (FLAG_trace_osr) { |
| + PrintF("[COSR - optimization complete for "); |
| + function->PrintName(); |
| + PrintF(", restoring interrupt calls]\n"); |
| + } |
| + Deoptimizer::RevertInterruptCode(isolate, *unoptimized); |
| + |
| + *ast_id = compiler->info()->osr_ast_id(); |
| + |
| + InstallOptimizedCode(compiler); |
| + isolate->optimizing_compiler_thread()->RemoveStaleOSRCandidates(); |
| + |
| + if (!function->IsOptimized()) { |
| + if (FLAG_trace_osr) { |
| + PrintF("[COSR - optimization failed for "); |
| + function->PrintName(); |
| + PrintF("]\n"); |
| + } |
| + return false; |
| + } |
| + |
| + DeoptimizationInputData* data = DeoptimizationInputData::cast( |
| + function->code()->deoptimization_data()); |
| + |
| + if (data->OsrPcOffset()->value() >= 0) { |
| + ASSERT(BailoutId(data->OsrAstId()->value()) == *ast_id); |
| + if (FLAG_trace_osr) { |
| + PrintF("[COSR - entry at AST id %d, offset %d in optimized code]\n", |
| + ast_id->ToInt(), data->OsrPcOffset()->value()); |
| + } |
| + return true; |
| + } |
| + return false; |
| + } |
| + |
| + if (!IsSuitableForOnStackReplacement(isolate, function, unoptimized)) { |
| + if (FLAG_trace_osr) { |
| + PrintF("[COSR - "); |
| + function->PrintName(); |
| + PrintF(" is unsuitable, restoring interrupt calls]\n"); |
| + } |
| + Deoptimizer::RevertInterruptCode(isolate, *unoptimized); |
| + return false; |
| + } |
| + |
| + RecompileConcurrent(function, pc_offset); |
| + return false; |
| +} |
| + |
| + |
| Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal, |
| Handle<Script> script) { |
| // Precondition: code has been parsed and scopes have been analyzed. |