| Index: runtime/vm/flow_graph_compiler_dbc.cc |
| diff --git a/runtime/vm/flow_graph_compiler_dbc.cc b/runtime/vm/flow_graph_compiler_dbc.cc |
| index 92ba799b2a277e1b4d20667c0e2e711498c03315..75bac6003e56d2c8a3b2cc325f7d34c7ba072c2f 100644 |
| --- a/runtime/vm/flow_graph_compiler_dbc.cc |
| +++ b/runtime/vm/flow_graph_compiler_dbc.cc |
| @@ -90,8 +90,121 @@ void FlowGraphCompiler::ExitIntrinsicMode() { |
| RawTypedData* CompilerDeoptInfo::CreateDeoptInfo(FlowGraphCompiler* compiler, |
| DeoptInfoBuilder* builder, |
| const Array& deopt_table) { |
| - UNIMPLEMENTED(); |
| - return TypedData::null(); |
| + if (deopt_env_ == NULL) { |
| + ++builder->current_info_number_; |
| + return TypedData::null(); |
| + } |
| + |
| + intptr_t stack_height = compiler->StackSize(); |
| + AllocateIncomingParametersRecursive(deopt_env_, &stack_height); |
| + |
| + intptr_t slot_ix = 0; |
| + Environment* current = deopt_env_; |
| + |
| + // Emit all kMaterializeObject instructions describing objects to be |
| + // materialized on deoptimization as a prefix to the deoptimization info. |
| + EmitMaterializations(deopt_env_, builder); |
| + |
| + // The real frame starts here. |
| + builder->MarkFrameStart(); |
| + |
| + Zone* zone = compiler->zone(); |
| + |
| + builder->AddCallerFp(slot_ix++); |
| + builder->AddReturnAddress(current->function(), deopt_id(), slot_ix++); |
| + builder->AddPcMarker(Function::ZoneHandle(zone), slot_ix++); |
| + builder->AddConstant(Function::ZoneHandle(zone), slot_ix++); |
| + |
| + // Emit all values that are needed for materialization as a part of the |
| + // expression stack for the bottom-most frame. This guarantees that GC |
| + // will be able to find them during materialization. |
| + slot_ix = builder->EmitMaterializationArguments(slot_ix); |
| + |
| + // For the innermost environment, set outgoing arguments and the locals. |
| + for (intptr_t i = current->Length() - 1; |
| + i >= current->fixed_parameter_count(); |
| + i--) { |
| + builder->AddCopy(current->ValueAt(i), current->LocationAt(i), slot_ix++); |
| + } |
| + |
|
zra
2016/05/19 16:24:27
Extra newline.
Vyacheslav Egorov (Google)
2016/05/20 12:11:47
Done.
|
| + |
| + builder->AddCallerFp(slot_ix++); |
| + |
| + Environment* previous = current; |
| + current = current->outer(); |
| + while (current != NULL) { |
| + // For any outer environment the deopt id is that of the call instruction |
| + // which is recorded in the outer environment. |
| + builder->AddReturnAddress( |
| + current->function(), |
| + Thread::ToDeoptAfter(current->deopt_id()), |
| + slot_ix++); |
| + |
| + builder->AddPcMarker(previous->function(), slot_ix++); |
| + builder->AddConstant(previous->function(), slot_ix++); |
| + |
| + // The values of outgoing arguments can be changed by the inlined call, so |
| + // we must read them from the previous environment. |
| + for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) { |
| + builder->AddCopy(previous->ValueAt(i), |
| + previous->LocationAt(i), |
| + slot_ix++); |
| + } |
| + |
| + // Set the locals; note that outgoing arguments are not in the environment. |
| + for (intptr_t i = current->Length() - 1; |
| + i >= current->fixed_parameter_count(); |
| + i--) { |
| + builder->AddCopy(current->ValueAt(i), |
| + current->LocationAt(i), |
| + slot_ix++); |
| + } |
| + |
| + builder->AddCallerFp(slot_ix++); |
| + |
| + // Iterate on the outer environment. |
| + previous = current; |
| + current = current->outer(); |
| + } |
| + // The previous pointer now points to the outermost environment. |
| + ASSERT(previous != NULL); |
| + |
| + // For the outermost environment, set caller PC. |
| + builder->AddCallerPc(slot_ix++); |
| + |
| + builder->AddPcMarker(previous->function(), slot_ix++); |
| + builder->AddConstant(previous->function(), slot_ix++); |
| + |
| + |
| + // For the outermost environment, set the incoming arguments. |
| + for (intptr_t i = previous->fixed_parameter_count() - 1; i >= 0; i--) { |
| + builder->AddCopy(previous->ValueAt(i), previous->LocationAt(i), slot_ix++); |
| + } |
| + |
| + return builder->CreateDeoptInfo(deopt_table); |
| +} |
| + |
| + |
| +void FlowGraphCompiler::RecordAfterCall(Instruction* instr) { |
| + RecordSafepoint(instr->locs()); |
| + // Marks either the continuation point in unoptimized code or the |
| + // deoptimization point in optimized code, after the call. |
| + const intptr_t deopt_id_after = Thread::ToDeoptAfter(instr->deopt_id()); |
| + if (is_optimizing()) { |
| + // The Return/ReturnTOS instructions drop the incoming arguments, so |
| + // we have to drop the outgoing arguments from the innermost environment. |
| + // On all other architectures the caller drops its outgoing arguments |
| + // itself, hence the difference. |
| + pending_deoptimization_env_->DropArguments(instr->ArgumentCount()); |
| + AddDeoptIndexAtCall(deopt_id_after, instr->token_pos()); |
| + } else { |
| + // Add deoptimization continuation point after the call and before the |
| + // arguments are removed. |
| + // In optimized code this descriptor is needed for exception handling. |
| + AddCurrentDescriptor(RawPcDescriptors::kDeopt, |
| + deopt_id_after, |
| + instr->token_pos()); |
| + } |
| } |
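The deopt_id_after computation above relies on the VM pairing each call's deopt id with a matching "after call" id. A minimal sketch of that pairing, assuming ids are handed out in (before, after) pairs one apart (the helper name and offset below are illustrative, not the VM's definitive API):

    #include <cassert>

    // Sketch: the "after call" continuation id is assumed to be the
    // "before call" id plus one, i.e. the second element of the pair.
    static int ToDeoptAfterSketch(int deopt_id_before) {
      return deopt_id_before + 1;
    }

    int main() {
      const int call_deopt_id = 4;                     // recorded before the call
      assert(ToDeoptAfterSketch(call_deopt_id) == 5);  // continuation point id
      return 0;
    }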
| @@ -109,16 +222,23 @@ void FlowGraphCompiler::GenerateAssertAssignable(TokenPosition token_pos, |
| const AbstractType& dst_type, |
| const String& dst_name, |
| LocationSummary* locs) { |
| - ASSERT(!is_optimizing()); |
| SubtypeTestCache& test_cache = SubtypeTestCache::Handle(); |
| if (!dst_type.IsVoidType() && dst_type.IsInstantiated()) { |
| test_cache = SubtypeTestCache::New(); |
| } |
| + if (is_optimizing()) { |
| + __ Push(locs->in(0).reg()); |
| + __ Push(locs->in(1).reg()); |
| + } |
| __ PushConstant(dst_type); |
| __ PushConstant(dst_name); |
| __ AssertAssignable(__ AddConstant(test_cache)); |
| + RecordSafepoint(locs); |
| AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id, token_pos); |
| + if (is_optimizing()) { |
| + __ Drop1(); |
|
Florian Schneider
2016/05/19 13:21:40
__ Drop(2)?
Vyacheslav Egorov (Google)
2016/05/19 15:19:40
Explained why Drop(1)
|
| + } |
| } |
| @@ -168,10 +288,21 @@ void FlowGraphCompiler::EmitFrameEntry() { |
| const intptr_t context_index = |
| -parsed_function().current_context_var()->index() - 1; |
| + if (CanOptimizeFunction() && |
| + function.IsOptimizable() && |
| + (!is_optimizing() || may_reoptimize())) { |
| + __ HotCheck(!is_optimizing(), GetOptimizationThreshold()); |
| + } |
| + |
| if (has_optional_params) { |
| - __ EntryOpt(num_fixed_params, num_opt_pos_params, num_opt_named_params); |
| - } else { |
| + __ EntryOptional(num_fixed_params, |
| + num_opt_pos_params, |
| + num_opt_named_params); |
| + } else if (!is_optimizing()) { |
| __ Entry(num_fixed_params, num_locals, context_index); |
| + } else { |
| + __ EntryOptimized(num_fixed_params, |
| + flow_graph_.graph_entry()->spill_slot_count()); |
| } |
| if (num_opt_named_params != 0) { |
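The entry selection above amounts to a three-way choice. As a sketch (the enum and helper below are descriptive stand-ins, not the assembler's API):

    enum class EntryKind { kOptional, kUnoptimized, kOptimized };

    // Sketch: which entry sequence EmitFrameEntry emits for a function.
    EntryKind ChooseEntry(bool has_optional_params, bool is_optimizing) {
      if (has_optional_params) return EntryKind::kOptional;  // __ EntryOptional(...)
      if (!is_optimizing) return EntryKind::kUnoptimized;     // __ Entry(...)
      return EntryKind::kOptimized;                            // __ EntryOptimized(...)
    }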
| @@ -212,14 +343,20 @@ void FlowGraphCompiler::EmitFrameEntry() { |
| } |
| - ASSERT(num_locals > 0); // There is always at least context_var. |
| if (has_optional_params) { |
| - ASSERT(!is_optimizing()); |
| - __ Frame(num_locals); // Reserve space for locals. |
| + if (!is_optimizing()) { |
| + ASSERT(num_locals > 0); // There is always at least context_var. |
| + __ Frame(num_locals); // Reserve space for locals. |
| + } else if (flow_graph_.graph_entry()->spill_slot_count() > |
| + flow_graph_.num_copied_params()) { |
| + __ Frame(flow_graph_.graph_entry()->spill_slot_count() - |
| + flow_graph_.num_copied_params()); |
| + } |
| } |
| if (function.IsClosureFunction()) { |
| - Register reg = context_index; |
| + Register reg = is_optimizing() ? flow_graph_.num_copied_params() |
| + : context_index; |
| Register closure_reg = reg; |
| LocalScope* scope = parsed_function().node_sequence()->scope(); |
| LocalVariable* local = scope->VariableAt(0); |
| @@ -229,7 +366,7 @@ void FlowGraphCompiler::EmitFrameEntry() { |
| closure_reg = -local->index() - 1; |
| } |
| __ LoadField(reg, closure_reg, Closure::context_offset() / kWordSize); |
| - } else if (has_optional_params) { |
| + } else if (has_optional_params && !is_optimizing()) { |
| __ LoadConstant(context_index, |
| Object::Handle(isolate()->object_store()->empty_context())); |
| } |
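In the optimized-with-optional-parameters case above, the copied parameters are assumed to already occupy the first frame slots, so Frame() only needs to reserve whatever spill slots remain. A worked sketch of that arithmetic (the helper name is illustrative):

    #include <algorithm>
    #include <cassert>

    // Sketch: extra slots Frame() must reserve beyond the copied parameters.
    static int ExtraFrameSlots(int spill_slot_count, int num_copied_params) {
      return std::max(0, spill_slot_count - num_copied_params);
    }

    int main() {
      assert(ExtraFrameSlots(5, 2) == 3);  // three more slots to reserve
      assert(ExtraFrameSlots(2, 2) == 0);  // nothing extra; Frame() is skipped
      return 0;
    }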
| @@ -254,12 +391,52 @@ void FlowGraphCompiler::CompileGraph() { |
| void ParallelMoveResolver::EmitMove(int index) { |
| - UNIMPLEMENTED(); |
| + MoveOperands* move = moves_[index]; |
| + const Location source = move->src(); |
| + const Location destination = move->dest(); |
| + if (source.IsStackSlot() && destination.IsRegister()) { |
| + // Only allow access to the arguments. |
| + ASSERT(source.base_reg() == FPREG); |
| + ASSERT(source.stack_index() < 0); |
| + __ Move(destination.reg(), -kParamEndSlotFromFp + source.stack_index()); |
| + } else if (source.IsRegister() && destination.IsRegister()) { |
| + __ Move(destination.reg(), source.reg()); |
| + } else if (source.IsConstant() && destination.IsRegister()) { |
| + __ LoadConstant(destination.reg(), source.constant()); |
| + } else { |
| + compiler_->Bailout("Unsupported move"); |
| + } |
| + |
| + move->Eliminate(); |
| } |
| void ParallelMoveResolver::EmitSwap(int index) { |
| - UNIMPLEMENTED(); |
| + MoveOperands* move = moves_[index]; |
| + const Location source = move->src(); |
| + const Location destination = move->dest(); |
| + |
| + if (source.IsRegister() && destination.IsRegister()) { |
| + __ Swap(destination.reg(), source.reg()); |
| + } else { |
| + UNREACHABLE(); |
|
Florian Schneider
2016/05/19 13:21:40
Use ASSERT instead of if-statement?
Vyacheslav Egorov (Google)
2016/05/19 15:19:40
Done.
|
| + } |
| + |
| + // The swap of source and destination has executed a move from source to |
| + // destination. |
| + move->Eliminate(); |
| + |
| + // Any unperformed (including pending) move with a source of either |
| + // this move's source or destination needs to have its source |
| + // changed to reflect the state of affairs after the swap. |
| + for (int i = 0; i < moves_.length(); ++i) { |
| + const MoveOperands& other_move = *moves_[i]; |
| + if (other_move.Blocks(source)) { |
| + moves_[i]->set_src(destination); |
| + } else if (other_move.Blocks(destination)) { |
| + moves_[i]->set_src(source); |
| + } |
| + } |
| } |