Chromium Code Reviews
Index: runtime/vm/debugger_dbc.cc
diff --git a/runtime/vm/debugger_dbc.cc b/runtime/vm/debugger_dbc.cc
index 5f82cd5368a10a3456def0225c9684c30ca2cadf..b77adae2171686c24d3055a4a0071acf4dc7172c 100644
--- a/runtime/vm/debugger_dbc.cc
+++ b/runtime/vm/debugger_dbc.cc
@@ -25,6 +25,11 @@ static Instr* CallInstructionFromReturnAddress(uword pc) {
 }
+static Instr* FastSmiInstructionFromCallReturnAddress(uword pc) {
+  return reinterpret_cast<Instr*>(pc) - 2;
+}
+
+
 void CodeBreakpoint::PatchCode() {
   ASSERT(!is_enabled_);
   const Code& code = Code::Handle(code_);
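
The new helper relies on the call's return address pointing just past the call instruction, so subtracting two Instr slots lands on the instruction emitted immediately before the call, which is where the fast Smi op guarding the instance call sits. A minimal standalone sketch of that arithmetic follows (not VM code: the Instr typedef, helper names, and opcode values are illustrative assumptions; it only assumes fixed-width instruction words with the fast Smi op emitted directly before the call it guards).

// Minimal sketch (not VM code): models why "return address - 2" reaches the
// fast Smi instruction.  Instr, the helper names, and the opcode values are
// made up for illustration.
#include <cassert>
#include <cstdint>

typedef uint32_t Instr;

enum FakeOpcode : Instr { kFakeFastSmiAdd = 1, kFakeInstanceCall2 = 2 };

static Instr* CallFromReturnAddress(uintptr_t pc) {
  return reinterpret_cast<Instr*>(pc) - 1;  // the call itself
}

static Instr* FastSmiFromCallReturnAddress(uintptr_t pc) {
  return reinterpret_cast<Instr*>(pc) - 2;  // the guard just before the call
}

int main() {
  // Layout: [fast Smi op][instance call]; the return address points past the call.
  Instr stream[2] = {kFakeFastSmiAdd, kFakeInstanceCall2};
  uintptr_t return_address = reinterpret_cast<uintptr_t>(stream + 2);

  assert(*CallFromReturnAddress(return_address) == kFakeInstanceCall2);
  assert(*FastSmiFromCallReturnAddress(return_address) == kFakeFastSmiAdd);
  return 0;
}
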
@@ -54,6 +59,15 @@ void CodeBreakpoint::PatchCode() {
       default:
         UNREACHABLE();
     }
+
+    // If this call is the fall-through for a fast Smi op, also disable the fast
+    // Smi op.
+    if (Bytecode::DecodeOpcode(saved_value_) == Bytecode::kInstanceCall2) {
zra (2016/07/28 18:08:48):
Looking at the code in InstanceCallInstr::EmitNati…

rmacnak (2016/07/28 20:13:14):
So those other cases turn out to be []= and some k…

zra (2016/07/28 20:17:47):
Cool. Thanks for looking into this and adding a te…
+      saved_value_fastsmi_ = *FastSmiInstructionFromCallReturnAddress(pc_);
+      ASSERT(Bytecode::IsFastSmiOpcode(saved_value_fastsmi_));
+      *FastSmiInstructionFromCallReturnAddress(pc_) =
+          Bytecode::Encode(Bytecode::kNop, 0, 0, 0);
+    }
   }
   is_enabled_ = true;
 }
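
This hunk is the patch side: once the call itself has been rewritten for the breakpoint, the fast Smi guard in front of it is saved and overwritten with a nop, so the fast Smi path cannot bypass the patched call. The standalone sketch below mirrors that shape under the same illustrative assumptions as before (fake opcodes and a fake breakpoint struct, not the VM's CodeBreakpoint).

// Minimal sketch (not VM code) of the patch step: when the breakpoint's call
// is the fall-through of a fast Smi op, the guard instruction is saved and
// overwritten with a nop so execution always reaches the patched call.  The
// struct, opcode values, and the "debug break" stand-in are illustrative.
#include <cassert>
#include <cstdint>

typedef uint32_t Instr;

enum FakeOpcode : Instr {
  kFakeNop = 0,
  kFakeFastSmiAdd = 1,
  kFakeInstanceCall2 = 2,
  kFakeDebugBreak = 3,
};

static Instr* FastSmiFromCallReturnAddress(uintptr_t pc) {
  return reinterpret_cast<Instr*>(pc) - 2;
}

struct FakeCodeBreakpoint {
  uintptr_t pc_;               // return address of the call being patched
  Instr saved_value_;          // original call instruction
  Instr saved_value_fastsmi_;  // original fast Smi guard, if any

  void Patch() {
    Instr* call = reinterpret_cast<Instr*>(pc_) - 1;
    saved_value_ = *call;
    *call = kFakeDebugBreak;
    // Only an instance call can be the fall-through of a fast Smi op here.
    if (saved_value_ == kFakeInstanceCall2) {
      saved_value_fastsmi_ = *FastSmiFromCallReturnAddress(pc_);
      assert(saved_value_fastsmi_ == kFakeFastSmiAdd);
      *FastSmiFromCallReturnAddress(pc_) = kFakeNop;
    }
  }
};

int main() {
  Instr stream[2] = {kFakeFastSmiAdd, kFakeInstanceCall2};
  FakeCodeBreakpoint bpt;
  bpt.pc_ = reinterpret_cast<uintptr_t>(stream + 2);
  bpt.Patch();
  assert(stream[0] == kFakeNop);         // fast path disabled
  assert(stream[1] == kFakeDebugBreak);  // call site now traps
  return 0;
}
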
@@ -75,6 +89,12 @@ void CodeBreakpoint::RestoreCode() {
       default:
         UNREACHABLE();
     }
+
+    if (Bytecode::DecodeOpcode(saved_value_) == Bytecode::kInstanceCall2) {
+      Instr current_instr = *FastSmiInstructionFromCallReturnAddress(pc_);
+      ASSERT(Bytecode::DecodeOpcode(current_instr) == Bytecode::kNop);
+      *FastSmiInstructionFromCallReturnAddress(pc_) = saved_value_fastsmi_;
+    }
   }
   is_enabled_ = false;
 }
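
And the restore side: the guard slot is expected to still hold the nop written by PatchCode, and the original fast Smi instruction is written back. A matching standalone sketch, with the same illustrative names and opcodes as above:

// Minimal sketch (not VM code) of the restore step: the guard slot should
// still contain the nop written when the breakpoint was enabled, and the
// saved fast Smi instruction is written back.
#include <cassert>
#include <cstdint>

typedef uint32_t Instr;

enum FakeOpcode : Instr {
  kFakeNop = 0,
  kFakeFastSmiAdd = 1,
  kFakeInstanceCall2 = 2,
};

static Instr* FastSmiFromCallReturnAddress(uintptr_t pc) {
  return reinterpret_cast<Instr*>(pc) - 2;
}

int main() {
  // Stream as left by the patch step: the guard slot already holds a nop.
  Instr stream[2] = {kFakeNop, kFakeInstanceCall2};
  const Instr saved_value_ = kFakeInstanceCall2;       // saved call instruction
  const Instr saved_value_fastsmi_ = kFakeFastSmiAdd;  // saved guard

  uintptr_t pc_ = reinterpret_cast<uintptr_t>(stream + 2);
  if (saved_value_ == kFakeInstanceCall2) {
    Instr current_instr = *FastSmiFromCallReturnAddress(pc_);
    assert(current_instr == kFakeNop);  // nothing else rewrote the guard slot
    *FastSmiFromCallReturnAddress(pc_) = saved_value_fastsmi_;
  }
  assert(stream[0] == kFakeFastSmiAdd);  // fast path re-enabled
  return 0;
}

The assert on the current instruction plays the same role as the ASSERT in the hunk above: it checks that the guard slot was not rewritten by anything else while the breakpoint was enabled.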