Index: runtime/vm/flow_graph_compiler.cc
===================================================================
--- runtime/vm/flow_graph_compiler.cc (revision 41279)
+++ runtime/vm/flow_graph_compiler.cc (working copy)
@@ -225,7 +225,8 @@
 static bool IsEmptyBlock(BlockEntryInstr* block) {
-  return !block->HasNonRedundantParallelMove() &&
+  return !block->IsCatchBlockEntry() &&
+         !block->HasNonRedundantParallelMove() &&
          block->next()->IsGoto() &&
          !block->next()->AsGoto()->HasNonRedundantParallelMove();
 }
@@ -287,7 +288,7 @@
       // own so that it can control the placement.
       AddCurrentDescriptor(RawPcDescriptors::kDeopt,
                            instr->deopt_id(),
-                           Scanner::kNoSourcePos);
+                           instr->token_pos());
     }
     AllocateRegistersLocally(instr);
   } else if (instr->MayThrow() &&
@@ -1108,7 +1109,6 @@
   }
   // Do not allocate known registers.
-  blocked_registers[CTX] = true;
   blocked_registers[SPREG] = true;
   blocked_registers[FPREG] = true;
   if (TMP != kNoRegister) {
@@ -1385,7 +1385,6 @@
       reg_(kNoRegister),
       spilled_(false) {
   uword blocked_mask = MaskBit(blocked)
-                     | MaskBit(CTX)
                      | MaskBit(SPREG)
                      | MaskBit(FPREG)
                      | MaskBit(TMP)
@@ -1394,6 +1393,7 @@
   if (resolver->compiler_->intrinsic_mode()) {
     // Block additional registers that must be preserved for intrinsics.
     blocked_mask |= MaskBit(ARGS_DESC_REG);
+    blocked_mask |= MaskBit(CTX);
   }
   reg_ = static_cast<Register>(
       resolver_->AllocateScratchRegister(Location::kRegister,