Index: src/arm/lithium-codegen-arm.cc
===================================================================
--- src/arm/lithium-codegen-arm.cc	(revision 7890)
+++ src/arm/lithium-codegen-arm.cc	(working copy)
@@ -595,19 +595,28 @@
     return;
   }
+  if (FLAG_trap_on_deopt) {
+    __ stop("trap_on_deopt", cc);
+  }
+
   if (cc == al) {
-    if (FLAG_trap_on_deopt) __ stop("trap_on_deopt");
+    // We don't need to optimize this code since the branch is always taken.
     __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
   } else {
-    if (FLAG_trap_on_deopt) {
-      Label done;
-      __ b(&done, NegateCondition(cc));
-      __ stop("trap_on_deopt");
-      __ Jump(entry, RelocInfo::RUNTIME_ENTRY);
-      __ bind(&done);
-    } else {
-      __ Jump(entry, RelocInfo::RUNTIME_ENTRY, cc);
-    }
+    Label record_deopt_jump_entry;
+    __ bind(&record_deopt_jump_entry);
+    // This breakpoint will be patched to a branch to the correct location when
+    // the deoptimization jump table is emitted (in Assembler::CheckConstPool).
+    // This happens before the code is executed so we should actually never hit
+    // this breakpoint.
+    // Make sure the constant pool is not emitted between our record and the
+    // breakpoint instruction.
+    __ BlockConstPoolFor(1);
+    __ RecordDeoptJumpEntry(entry, cc);
+    __ bkpt(kBkptUninitializedCode);
+    // This is a performance-critical section. We don't want to emit anything
+    // more than this single breakpoint, which will be patched to a branch
+    // that is almost never taken.
+    ASSERT(__ InstructionsGeneratedSince(&record_deopt_jump_entry) == 1);
   }
 }
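
The patch replaces the per-check trap/branch sequence with a record-then-patch scheme: each deopt check emits a single bkpt placeholder and registers it via RecordDeoptJumpEntry, and the placeholders are later rewritten into conditional branches when the deoptimization jump table is emitted from Assembler::CheckConstPool. The sketch below is a rough, self-contained model of that flow, not code from the patch: ToyAssembler, JumpTableEntry, EmitDeoptJumpTable, and the instruction encodings are invented for illustration; only the overall record/patch idea mirrors the comments in the diff.

// Toy model (assumed names, not the V8 Assembler API) of the deopt jump
// table scheme: record a placeholder per deopt check, patch it to a
// conditional branch when the table is emitted.
#include <cstdint>
#include <cstdio>
#include <vector>

struct JumpTableEntry {
  int placeholder_pos;    // buffer position of the bkpt placeholder
  uint32_t deopt_target;  // address of the deoptimization entry
  int condition;          // condition code the patched branch should use
};

class ToyAssembler {
 public:
  int pc_offset() const { return static_cast<int>(buffer_.size()); }

  // Emits the single placeholder instruction and remembers how to patch it.
  void RecordDeoptJumpEntry(uint32_t target, int cond) {
    table_.push_back({pc_offset(), target, cond});
    Emit(kBkptPattern);  // stand-in for __ bkpt(kBkptUninitializedCode)
  }

  // Emits the jump table and rewrites every placeholder into a conditional
  // branch whose offset points at the table entry just emitted.
  void EmitDeoptJumpTable() {
    for (const JumpTableEntry& entry : table_) {
      int branch_offset = pc_offset() - entry.placeholder_pos;
      Patch(entry.placeholder_pos,
            EncodeConditionalBranch(entry.condition, branch_offset));
      Emit(entry.deopt_target);  // table slot: literal far-jump target
    }
    table_.clear();
  }

 private:
  // Toy encodings: real ARM instruction encoding is more involved.
  static constexpr uint32_t kBkptPattern = 0xE1200070;

  static uint32_t EncodeConditionalBranch(int cond, int offset) {
    return (static_cast<uint32_t>(cond) << 28) |
           (static_cast<uint32_t>(offset) & 0x00FFFFFF);
  }

  void Emit(uint32_t word) { buffer_.push_back(word); }
  void Patch(int pos, uint32_t word) { buffer_[pos] = word; }

  std::vector<uint32_t> buffer_;
  std::vector<JumpTableEntry> table_;
};

int main() {
  ToyAssembler masm;
  masm.RecordDeoptJumpEntry(0xDEAD0000u, /*cond=*/0);  // e.g. "eq"
  masm.RecordDeoptJumpEntry(0xBEEF0000u, /*cond=*/1);  // e.g. "ne"
  masm.EmitDeoptJumpTable();  // placeholders become conditional branches
  std::printf("emitted %d words\n", masm.pc_offset());
  return 0;
}

The single-placeholder design is what the ASSERT in the patch enforces: the fast path of every deopt check costs exactly one instruction, while the cold far jump to the runtime entry lives once in the shared table.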