Index: src/ia32/full-codegen-ia32.cc |
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc |
index c9f7f32b6a2ec74ae81a14cd91017e1056c77742..9a2c3ce72c9dff731f7160aa9130a49472dba47a 100644 |
--- a/src/ia32/full-codegen-ia32.cc |
+++ b/src/ia32/full-codegen-ia32.cc |
@@ -4891,6 +4891,88 @@ FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit( |
#undef __ |
+ |
+static const byte kJnsInstruction = 0x79; |
+static const byte kJnsOffset = 0x11; |
+static const byte kCallInstruction = 0xe8; |
+static const byte kNopByteOne = 0x66; |
+static const byte kNopByteTwo = 0x90; |
+ |
+// The back edge bookkeeping code matches the pattern: |
+// |
+// sub <profiling_counter>, <delta> |
+// jns ok |
+// call <interrupt stub> |
+// ok: |
+// |
+// The patched back edge looks like this: |
+// |
+// sub <profiling_counter>, <delta> ;; Not changed |
+// nop |
+// nop |
+//   call <on-stack replacement>                                                              |
+// ok: |
+ |
+void BackEdgeTable::PatchAt(Code* unoptimized_code, |
+ Address pc, |
+ Code* replacement_code) { |
+ // Turn the jump into nops. |
+ Address call_target_address = pc - kIntSize; |
+ *(call_target_address - 3) = kNopByteOne; |
+ *(call_target_address - 2) = kNopByteTwo; |
+ // Replace the call address. |
+ Assembler::set_target_address_at(call_target_address, |
+ replacement_code->entry()); |
+ |
+ unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( |
+ unoptimized_code, call_target_address, replacement_code); |
+} |
+ |
+ |
+void BackEdgeTable::RevertAt(Code* unoptimized_code, |
+ Address pc, |
+ Code* interrupt_code) { |
+ // Restore the original jump. |
+ Address call_target_address = pc - kIntSize; |
+ *(call_target_address - 3) = kJnsInstruction; |
+ *(call_target_address - 2) = kJnsOffset; |
+ // Restore the original call address. |
+ Assembler::set_target_address_at(call_target_address, |
+ interrupt_code->entry()); |
+ |
+ interrupt_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch( |
+ unoptimized_code, call_target_address, interrupt_code); |
+} |
+ |
+ |
+#ifdef DEBUG |
+BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState( |
+ Isolate* isolate, |
+ Code* unoptimized_code, |
+ Address pc) { |
+ Address call_target_address = pc - kIntSize; |
+ ASSERT_EQ(kCallInstruction, *(call_target_address - 1)); |
+ if (*(call_target_address - 3) == kNopByteOne) { |
+ ASSERT_EQ(kNopByteTwo, *(call_target_address - 2)); |
+ Code* osr_builtin = |
+ isolate->builtins()->builtin(Builtins::kOnStackReplacement); |
+ ASSERT_EQ(osr_builtin->entry(), |
+ Assembler::target_address_at(call_target_address)); |
+ return ON_STACK_REPLACEMENT; |
+ } else { |
+ // Get the interrupt stub code object to match against from cache. |
+ Code* interrupt_builtin = |
+ isolate->builtins()->builtin(Builtins::kInterruptCheck); |
+ ASSERT_EQ(interrupt_builtin->entry(), |
+ Assembler::target_address_at(call_target_address)); |
+ ASSERT_EQ(kJnsInstruction, *(call_target_address - 3)); |
+ ASSERT_EQ(kJnsOffset, *(call_target_address - 2)); |
+ return INTERRUPT; |
+ } |
+} |
+#endif // DEBUG |
+ |
+ |
} } // namespace v8::internal |
#endif // V8_TARGET_ARCH_IA32 |