Index: runtime/vm/flow_graph_compiler_arm64.cc
diff --git a/runtime/vm/flow_graph_compiler_arm64.cc b/runtime/vm/flow_graph_compiler_arm64.cc
index ac4d66df0b5f5754694a2d253324c1a80b11b65f..405d974883c0457b4cbcda341bd94c98ee663ce4 100644
--- a/runtime/vm/flow_graph_compiler_arm64.cc
+++ b/runtime/vm/flow_graph_compiler_arm64.cc
@@ -1106,21 +1106,6 @@ void FlowGraphCompiler::CompileGraph() {
   __ brk(0);
   ASSERT(assembler()->constant_pool_allowed());
   GenerateDeferredCode();
-
-  BeginCodeSourceRange();
-  if (is_optimizing() && !FLAG_precompiled_mode) {
-    // Leave enough space for patching in case of lazy deoptimization.
-    for (intptr_t i = 0;
-         i < CallPattern::kDeoptCallLengthInInstructions;
-         ++i) {
-      __ orr(R0, ZR, Operand(R0));  // nop
-    }
-    lazy_deopt_return_pc_offset_ = assembler()->CodeSize();
-    __ BranchPatchable(*StubCode::DeoptimizeLazyFromReturn_entry());
-    lazy_deopt_throw_pc_offset_ = assembler()->CodeSize();
-    __ BranchPatchable(*StubCode::DeoptimizeLazyFromThrow_entry());
-  }
-  EndCodeSourceRange(TokenPosition::kDartCodeEpilogue);
 }