Index: src/deoptimizer.cc
diff --git a/src/deoptimizer.cc b/src/deoptimizer.cc
index dc9ffc51186be1c5535297c6adf77f9efbf922b1..1891713995b1f98b1ddfd37e910de080f7f02c9e 100644
--- a/src/deoptimizer.cc
+++ b/src/deoptimizer.cc
@@ -2577,9 +2577,17 @@ bool Deoptimizer::DoOsrTranslateCommand(TranslationIterator* iterator,
 }
-void Deoptimizer::PatchInterruptCode(Code* unoptimized_code,
-                                     Code* interrupt_code,
-                                     Code* replacement_code) {
+void Deoptimizer::PatchInterruptCode(Isolate* isolate,
+                                     Code* unoptimized_code) {
+  DisallowHeapAllocation no_gc;
+  // Get the interrupt stub code object to match against. We aren't
+  // prepared to generate it, but we don't expect to have to.
+  Code* interrupt_code = NULL;
+  InterruptStub interrupt_stub;
+  if (!interrupt_stub.FindCodeInCache(&interrupt_code, isolate)) return;
titzer, 2013/08/28 12:32:55: Should this be an error?
Yang, 2013/08/28 12:41:58: yup. I'll turn that into an UNREACHABLE.
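Yang's follow-up would presumably make a cache miss fatal rather than a silent return. A minimal sketch of that change, assuming the surrounding code stays as in the patch above (the actual follow-up may look different):

  Code* interrupt_code = NULL;
  InterruptStub interrupt_stub;
  // Failing to find the interrupt stub in the cache would indicate a bug,
  // so assert instead of returning silently.
  if (!interrupt_stub.FindCodeInCache(&interrupt_code, isolate)) {
    UNREACHABLE();
  }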
+  Code* replacement_code =
+      isolate->builtins()->builtin(Builtins::kOnStackReplacement);
+
   // Iterate over the back edge table and patch every interrupt
   // call to an unconditional call to the replacement code.
   int loop_nesting_level = unoptimized_code->allow_osr_at_loop_nesting_level();
@@ -2588,9 +2596,11 @@ void Deoptimizer::PatchInterruptCode(Code* unoptimized_code,
        !back_edges.Done();
        back_edges.Next()) {
     if (static_cast<int>(back_edges.loop_depth()) == loop_nesting_level) {
+      ASSERT_EQ(NOT_PATCHED, GetInterruptPatchState(isolate,
+                                                    unoptimized_code,
+                                                    back_edges.pc()));
       PatchInterruptCodeAt(unoptimized_code,
                            back_edges.pc(),
-                           interrupt_code,
                            replacement_code);
     }
   }
@@ -2598,14 +2608,17 @@ void Deoptimizer::PatchInterruptCode(Code* unoptimized_code,
   unoptimized_code->set_back_edges_patched_for_osr(true);
 #ifdef DEBUG
   Deoptimizer::VerifyInterruptCode(
-      unoptimized_code, interrupt_code, replacement_code, loop_nesting_level);
+      isolate, unoptimized_code, loop_nesting_level);
 #endif  // DEBUG
 }
-void Deoptimizer::RevertInterruptCode(Code* unoptimized_code,
-                                      Code* interrupt_code,
-                                      Code* replacement_code) {
+void Deoptimizer::RevertInterruptCode(Isolate* isolate,
+                                      Code* unoptimized_code) {
+  InterruptStub interrupt_stub;
+  Code* interrupt_code = *interrupt_stub.GetCode(isolate);
+  DisallowHeapAllocation no_gc;
+
   // Iterate over the back edge table and revert the patched interrupt calls.
   ASSERT(unoptimized_code->back_edges_patched_for_osr());
   int loop_nesting_level = unoptimized_code->allow_osr_at_loop_nesting_level();
@@ -2614,10 +2627,10 @@ void Deoptimizer::RevertInterruptCode(Code* unoptimized_code,
        !back_edges.Done();
        back_edges.Next()) {
     if (static_cast<int>(back_edges.loop_depth()) <= loop_nesting_level) {
-      RevertInterruptCodeAt(unoptimized_code,
-                            back_edges.pc(),
-                            interrupt_code,
-                            replacement_code);
+      ASSERT_NE(NOT_PATCHED, GetInterruptPatchState(isolate,
titzer, 2013/08/28 12:32:55: ASSERT_EQ(PATCHED_FOR_OSR, ... ?
Yang, 2013/08/28 12:41:58: In this particular case, ASSERT_EQ(PATCHED_FOR_OSR…
+                                                    unoptimized_code,
+                                                    back_edges.pc()));
+      RevertInterruptCodeAt(unoptimized_code, back_edges.pc(), interrupt_code);
     }
   }
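titzer's suggested assertion above, written out as a sketch against the code in this hunk; it assumes PATCHED_FOR_OSR is the only patched state GetInterruptPatchState can report here, which is the point Yang's truncated reply appears to address:

      ASSERT_EQ(PATCHED_FOR_OSR, GetInterruptPatchState(isolate,
                                                         unoptimized_code,
                                                         back_edges.pc()));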
@@ -2625,16 +2638,14 @@ void Deoptimizer::RevertInterruptCode(Code* unoptimized_code,
   unoptimized_code->set_allow_osr_at_loop_nesting_level(0);
 #ifdef DEBUG
   // Assert that none of the back edges are patched anymore.
-  Deoptimizer::VerifyInterruptCode(
-      unoptimized_code, interrupt_code, replacement_code, -1);
+  Deoptimizer::VerifyInterruptCode(isolate, unoptimized_code, -1);
 #endif  // DEBUG
 }
 #ifdef DEBUG
-void Deoptimizer::VerifyInterruptCode(Code* unoptimized_code,
-                                      Code* interrupt_code,
-                                      Code* replacement_code,
+void Deoptimizer::VerifyInterruptCode(Isolate* isolate,
+                                      Code* unoptimized_code,
                                       int loop_nesting_level) {
   for (FullCodeGenerator::BackEdgeTableIterator back_edges(unoptimized_code);
        !back_edges.Done();
@@ -2644,10 +2655,9 @@ void Deoptimizer::VerifyInterruptCode(Code* unoptimized_code,
     // Assert that all back edges for shallower loops (and only those)
     // have already been patched.
     CHECK_EQ((static_cast<int>(loop_depth) <= loop_nesting_level),
-             InterruptCodeIsPatched(unoptimized_code,
-                                    back_edges.pc(),
-                                    interrupt_code,
-                                    replacement_code));
+             GetInterruptPatchState(isolate,
+                                    unoptimized_code,
+                                    back_edges.pc()) != NOT_PATCHED);
   }
 }
 #endif  // DEBUG
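With the new signatures, callers pass only the isolate and the unoptimized code object; the interrupt stub and the OnStackReplacement builtin are looked up inside the Deoptimizer itself. A hedged sketch of what a call site might look like after this change (the variable names and surrounding context are assumptions, not taken from this patch):

  // Patch the back edges of the unoptimized code so the next back-edge
  // hit jumps to the on-stack-replacement builtin ...
  Deoptimizer::PatchInterruptCode(isolate, function->shared()->code());

  // ... and later restore the original interrupt calls.
  Deoptimizer::RevertInterruptCode(isolate, function->shared()->code());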