Index: runtime/vm/flow_graph_compiler_ia32.cc
diff --git a/runtime/vm/flow_graph_compiler_ia32.cc b/runtime/vm/flow_graph_compiler_ia32.cc
index 37e3f3d6b5dac54093d0a097c75160c9803c58ca..85ed48fb158d6ccd820f33ae8d9f84a7f5b55666 100644
--- a/runtime/vm/flow_graph_compiler_ia32.cc
+++ b/runtime/vm/flow_graph_compiler_ia32.cc
@@ -505,7 +505,10 @@ void FlowGraphCompiler::GenerateInstanceOf(intptr_t token_pos,
   __ pushl(EDX);  // Instantiator type arguments.
   __ LoadObject(EAX, test_cache);
   __ pushl(EAX);
-  GenerateCallRuntime(token_pos, deopt_id, kInstanceofRuntimeEntry, locs);
+  GenerateCallRuntime(token_pos,
+                      deopt_id,
+                      kInstanceofRuntimeEntry,
+                      locs);
   // Pop the parameters supplied to the runtime entry. The result of the
   // instanceof runtime call will be left as the result of the operation.
   __ Drop(5);
@@ -628,14 +631,18 @@ void FlowGraphCompiler::EmitInstructionPrologue(Instruction* instr) {
   if (!is_optimizing()) {
     if (FLAG_enable_type_checks && instr->IsAssertAssignable()) {
       AssertAssignableInstr* assert = instr->AsAssertAssignable();
-      AddCurrentDescriptor(PcDescriptors::kDeoptBefore,
+      AddCurrentDescriptor(PcDescriptors::kDeopt,
                            assert->deopt_id(),
                            assert->token_pos());
     } else if (instr->IsGuardField()) {
       GuardFieldInstr* guard = instr->AsGuardField();
-      AddCurrentDescriptor(PcDescriptors::kDeoptBefore,
+      AddCurrentDescriptor(PcDescriptors::kDeopt,
                            guard->deopt_id(),
                            Scanner::kDummyTokenIndex);
+    } else if (instr->CanBeDeoptimizationTarget()) {
+      AddCurrentDescriptor(PcDescriptors::kDeopt,
+                           instr->deopt_id(),
+                           Scanner::kDummyTokenIndex);
     }
     AllocateRegistersLocally(instr);
   }
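
In unoptimized code, the prologue now emits the single PcDescriptors::kDeopt kind where it previously emitted kDeoptBefore, and it also covers any other instruction that reports CanBeDeoptimizationTarget(). The standalone sketch below (all types and values are invented for illustration, not taken from the VM) shows the idea that makes one kind sufficient: once every deoptimization point has its own id, the id alone selects the continuation, so a before/after split in the descriptor kind carries no extra information.

    // Illustration only (invented types and values): with per-point deopt ids,
    // the lookup key for a deoptimization continuation is the id, not a
    // before/after descriptor kind.
    #include <cstdint>
    #include <iostream>
    #include <map>

    enum class Kind { kDeopt, kIcCall, kOther };  // assumed, reduced kind set

    struct Descriptor {
      uintptr_t pc_offset;  // continuation offset in unoptimized code
      Kind kind;
    };

    int main() {
      std::map<intptr_t, Descriptor> descriptors;  // keyed by deopt id

      // One call site: "before" id 10 and "after" id 11. Both entries use the
      // same kDeopt kind; the distinct ids keep them apart.
      descriptors[10] = Descriptor{0x20, Kind::kDeopt};
      descriptors[11] = Descriptor{0x34, Kind::kDeopt};

      // Deoptimization resumes at whichever id the optimized code recorded.
      std::cout << std::hex << descriptors[11].pc_offset << "\n";  // prints 34
      return 0;
    }
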
@@ -1094,14 +1101,13 @@ void FlowGraphCompiler::GenerateDartCall(intptr_t deopt_id,
   RecordSafepoint(locs);
   // Marks either the continuation point in unoptimized code or the
   // deoptimization point in optimized code, after call.
+  const intptr_t deopt_id_after = Isolate::ToDeoptAfter(deopt_id);
   if (is_optimizing()) {
-    AddDeoptIndexAtCall(deopt_id, token_pos);
+    AddDeoptIndexAtCall(deopt_id_after, token_pos);
   } else {
     // Add deoptimization continuation point after the call and before the
     // arguments are removed.
-    AddCurrentDescriptor(PcDescriptors::kDeoptAfter,
-                         deopt_id,
-                         token_pos);
+    AddCurrentDescriptor(PcDescriptors::kDeopt, deopt_id_after, token_pos);
   }
 }
 
@@ -1116,14 +1122,13 @@ void FlowGraphCompiler::GenerateCallRuntime(intptr_t token_pos,
   if (deopt_id != Isolate::kNoDeoptId) {
     // Marks either the continuation point in unoptimized code or the
     // deoptimization point in optimized code, after call.
+    const intptr_t deopt_id_after = Isolate::ToDeoptAfter(deopt_id);
     if (is_optimizing()) {
-      AddDeoptIndexAtCall(deopt_id, token_pos);
+      AddDeoptIndexAtCall(deopt_id_after, token_pos);
     } else {
       // Add deoptimization continuation point after the call and before the
       // arguments are removed.
-      AddCurrentDescriptor(PcDescriptors::kDeoptAfter,
-                           deopt_id,
-                           token_pos);
+      AddCurrentDescriptor(PcDescriptors::kDeopt, deopt_id_after, token_pos);
     }
   }
 }
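
Both GenerateDartCall and GenerateCallRuntime now derive the post-call id with Isolate::ToDeoptAfter(deopt_id) and record it under the same kDeopt kind, instead of recording the original id under a dedicated kDeoptAfter kind. A minimal sketch of the pairing this presumably relies on follows; the kDeoptIdStep constant, the +1 encoding, and the sketch namespace are assumptions made for illustration, not the VM's actual definitions.

    // Illustrative sketch only: models deopt ids handed out in before/after
    // pairs, so the "after" id is derivable from the "before" id.
    #include <cassert>
    #include <cstdint>

    namespace sketch {

    constexpr intptr_t kNoDeoptId = -1;   // assumed sentinel
    constexpr intptr_t kDeoptIdStep = 2;  // assumed: each point reserves two ids

    // Assumed counterpart of Isolate::ToDeoptAfter(): the "after" continuation
    // id immediately follows the "before" id in this encoding.
    inline intptr_t ToDeoptAfter(intptr_t deopt_id) {
      assert(deopt_id != kNoDeoptId);
      return deopt_id + 1;
    }

    }  // namespace sketch

    int main() {
      intptr_t next_deopt_id = 0;

      // A call site reserves a fresh pair of ids...
      const intptr_t deopt_id = next_deopt_id;
      next_deopt_id += sketch::kDeoptIdStep;

      // ...and records its post-call continuation under the derived "after"
      // id, which is what the two hunks above switch to.
      assert(sketch::ToDeoptAfter(deopt_id) < next_deopt_id);
      return 0;
    }

Under this encoding, optimized code (AddDeoptIndexAtCall) and unoptimized code (AddCurrentDescriptor with kDeopt) both name the post-call continuation by the same derived id.
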
@@ -1235,7 +1240,7 @@ void FlowGraphCompiler::EmitMegamorphicInstanceCall(
   __ call(EAX);
   AddCurrentDescriptor(PcDescriptors::kOther, Isolate::kNoDeoptId, token_pos);
   RecordSafepoint(locs);
-  AddDeoptIndexAtCall(deopt_id, token_pos);
+  AddDeoptIndexAtCall(Isolate::ToDeoptAfter(deopt_id), token_pos);
   __ Drop(argument_count);
 }
 
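Taken together, every deoptimization-related descriptor is now identified by its deopt id under the one kDeopt kind. A natural consistency property to expect from this (stated here as an assumption, and checked with a hypothetical standalone helper rather than any VM API) is that unoptimized code never records two kDeopt descriptors with the same id.

    // Hypothetical standalone check (not a VM API): with a single kDeopt kind,
    // no two descriptors should share a deopt id for that kind.
    #include <cassert>
    #include <cstdint>
    #include <set>
    #include <vector>

    enum class Kind { kDeopt, kIcCall, kOther };  // assumed kinds

    struct Descriptor {
      intptr_t deopt_id;
      Kind kind;
    };

    bool DeoptIdsAreUnique(const std::vector<Descriptor>& descriptors) {
      std::set<intptr_t> seen;
      for (const Descriptor& d : descriptors) {
        if (d.kind != Kind::kDeopt) continue;  // only deopt points matter here
        if (!seen.insert(d.deopt_id).second) return false;  // duplicate id
      }
      return true;
    }

    int main() {
      // The "before" id (10) and the derived "after" id (11) of one call site
      // coexist because they are distinct ids, not distinct kinds.
      const std::vector<Descriptor> descriptors = {
          {10, Kind::kDeopt}, {11, Kind::kDeopt}, {10, Kind::kIcCall}};
      assert(DeoptIdsAreUnique(descriptors));
      return 0;
    }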