Index: runtime/vm/intermediate_language_arm.cc
diff --git a/runtime/vm/intermediate_language_arm.cc b/runtime/vm/intermediate_language_arm.cc
index 00df26bf9fafd3e72a4b6daabbfb796444a8335f..a7e75b810704132e2ba4e1866dafef3e26c58100 100644
--- a/runtime/vm/intermediate_language_arm.cc
+++ b/runtime/vm/intermediate_language_arm.cc
@@ -4444,9 +4444,6 @@ void GraphEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
 void TargetEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
   __ Bind(compiler->GetJumpLabel(this));
   if (!compiler->is_optimizing()) {
-    compiler->AddCurrentDescriptor(PcDescriptors::kDeopt,
-                                   deopt_id_,
-                                   Scanner::kDummyTokenIndex);
     // Add an edge counter.
     const Array& counter = Array::ZoneHandle(Array::New(1, Heap::kOld));
     counter.SetAt(0, Smi::Handle(Smi::New(0)));
@@ -4456,6 +4453,12 @@ void TargetEntryInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
     __ adds(IP, IP, ShifterOperand(Smi::RawValue(1)));
     __ LoadImmediate(IP, Smi::RawValue(Smi::kMaxValue), VS);  // If overflow.
     __ str(IP, FieldAddress(R0, Array::element_offset(0)));
+    // On ARM the deoptimization descriptor points after the edge counter
+    // code so that we can reuse the same pattern matching code as at call
+    // sites, which matches backwards from the end of the pattern.
+    compiler->AddCurrentDescriptor(PcDescriptors::kDeopt,
+                                   deopt_id_,
+                                   Scanner::kDummyTokenIndex);
   }
   if (HasParallelMove()) {
     compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
@@ -4469,6 +4472,25 @@ LocationSummary* GotoInstr::MakeLocationSummary() const {
 
 
 void GotoInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
+  if (!compiler->is_optimizing()) {
+    // Add an edge counter.
+    const Array& counter = Array::ZoneHandle(Array::New(1, Heap::kOld));
+    counter.SetAt(0, Smi::Handle(Smi::New(0)));
+    __ Comment("Edge counter");
+    __ LoadObject(R0, counter);
+    __ ldr(IP, FieldAddress(R0, Array::element_offset(0)));
+    __ adds(IP, IP, ShifterOperand(Smi::RawValue(1)));
+    __ LoadImmediate(IP, Smi::RawValue(Smi::kMaxValue), VS);  // If overflow.
+    __ str(IP, FieldAddress(R0, Array::element_offset(0)));
+    // Add a deoptimization descriptor for deoptimizing instructions that
+    // may be inserted before this instruction. On ARM this descriptor
+    // points after the edge counter code so that we can reuse the same
+    // pattern matching code as at call sites, which matches backwards from
+    // the end of the pattern.
+    compiler->AddCurrentDescriptor(PcDescriptors::kDeopt,
+                                   GetDeoptId(),
+                                   0);  // No token position.
+  }
   if (HasParallelMove()) {
     compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
   }
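
Note (not part of the patch): the two hunks above now emit the same ARM edge-counter sequence, once in TargetEntryInstr::EmitNativeCode and once in GotoInstr::EmitNativeCode. The sketch below is a hypothetical consolidation for illustration only; the helper name EmitEdgeCounter and its intptr_t parameters are assumptions, and it relies on the file's usual `#define __ compiler->assembler()->` shorthand plus the accessors already visible in the hunks.

// Hypothetical helper (illustrative sketch, not in this change): the
// edge-counter pattern shared by both EmitNativeCode methods above.
static void EmitEdgeCounter(FlowGraphCompiler* compiler,
                            intptr_t deopt_id,
                            intptr_t token_pos) {
  // A one-element old-space array holds the Smi-tagged execution count.
  const Array& counter = Array::ZoneHandle(Array::New(1, Heap::kOld));
  counter.SetAt(0, Smi::Handle(Smi::New(0)));
  __ Comment("Edge counter");
  __ LoadObject(R0, counter);
  __ ldr(IP, FieldAddress(R0, Array::element_offset(0)));
  // Increment the count; if the add overflows (VS), saturate at Smi::kMaxValue.
  __ adds(IP, IP, ShifterOperand(Smi::RawValue(1)));
  __ LoadImmediate(IP, Smi::RawValue(Smi::kMaxValue), VS);
  __ str(IP, FieldAddress(R0, Array::element_offset(0)));
  // Record the kDeopt descriptor after the counter code, so its PC marks
  // the end of the emitted pattern.
  compiler->AddCurrentDescriptor(PcDescriptors::kDeopt, deopt_id, token_pos);
}

The descriptor-last ordering is the point of the change: per the new comments, the descriptor's PC must sit at the end of the edge-counter code so that the call-site pattern matching code, which matches backwards from the end of a pattern, can be reused for these descriptors.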