Index: src/ia32/full-codegen-ia32.cc
===================================================================
--- src/ia32/full-codegen-ia32.cc (revision 6011)
+++ src/ia32/full-codegen-ia32.cc (working copy)
@@ -41,8 +41,61 @@
 namespace v8 {
 namespace internal {
+
 #define __ ACCESS_MASM(masm_)
+
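+// A jump patch site records the location of an inlined smi-check jump so
+// that an IC can later patch it (jc is rewritten to jz, jnc to jnz; see
+// EmitJump below).  The conditions are chosen so that, before any patching,
+// control always reaches the IC/stub call.  EmitPatchInfo emits a test
+// instruction whose 8-bit immediate encodes the distance back to the patch
+// site, which lets the patching code locate the jump to rewrite.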
+class JumpPatchSite BASE_EMBEDDED {
+ public:
+  explicit JumpPatchSite(MacroAssembler* masm)
+      : masm_(masm) {
+#ifdef DEBUG
+    info_emitted_ = false;
+#endif
+  }
+
+  ~JumpPatchSite() {
+    ASSERT(patch_site_.is_bound() == info_emitted_);
+  }
+
+  void EmitJumpIfNotSmi(Register reg, NearLabel* target) {
+    __ test(reg, Immediate(kSmiTagMask));
+    EmitJump(not_carry, target);  // Always taken before patched.
+  }
+
+  void EmitJumpIfSmi(Register reg, NearLabel* target) {
+    __ test(reg, Immediate(kSmiTagMask));
+    EmitJump(carry, target);  // Never taken before patched.
+  }
+
+  void EmitPatchInfo() {
+    int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
+    ASSERT(is_int8(delta_to_patch_site));
+    __ test(eax, Immediate(delta_to_patch_site));
+#ifdef DEBUG
+    info_emitted_ = true;
+#endif
+  }
+
+  bool is_bound() const { return patch_site_.is_bound(); }
+
+ private:
+  // jc will be patched with jz, jnc will become jnz.
+  void EmitJump(Condition cc, NearLabel* target) {
+    ASSERT(!patch_site_.is_bound() && !info_emitted_);
+    ASSERT(cc == carry || cc == not_carry);
+    __ bind(&patch_site_);
+    __ j(cc, target);
+  }
+
+  MacroAssembler* masm_;
+  Label patch_site_;
+#ifdef DEBUG
+  bool info_emitted_;
+#endif
+};
+
+
 // Generate code for a JS function. On entry to the function the receiver
 // and arguments have been pushed on the stack left to right, with the
 // return address on top of them. The actual argument count matches the
@@ -715,12 +768,13 @@
     // Perform the comparison as if via '==='.
     __ mov(edx, Operand(esp, 0));  // Switch value.
     bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
+    JumpPatchSite patch_site(masm_);
     if (inline_smi_code) {
       NearLabel slow_case;
       __ mov(ecx, edx);
       __ or_(ecx, Operand(eax));
-      __ test(ecx, Immediate(kSmiTagMask));
-      __ j(not_zero, &slow_case, not_taken);
+      patch_site.EmitJumpIfNotSmi(ecx, &slow_case);
+
       __ cmp(edx, Operand(eax));
       __ j(not_equal, &next_test);
       __ Drop(1);  // Switch value is no longer needed.
@@ -730,9 +784,8 @@
     // Record position before stub call for type feedback.
     SetSourcePosition(clause->position());
-
     Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
-    __ call(ic, RelocInfo::CODE_TARGET);
+    EmitCallIC(ic, &patch_site);
     __ test(eax, Operand(eax));
     __ j(not_equal, &next_test);
@@ -1556,12 +1609,11 @@
                                            OverwriteMode mode,
                                            bool left_is_constant_smi,
                                            Smi* value) {
-  NearLabel call_stub;
-  Label done;
+  NearLabel call_stub, done;
   __ add(Operand(eax), Immediate(value));
   __ j(overflow, &call_stub);
-  __ test(eax, Immediate(kSmiTagMask));
-  __ j(zero, &done);
+  JumpPatchSite patch_site(masm_);
+  patch_site.EmitJumpIfSmi(eax, &done);
   // Undo the optimistic add operation and call the shared stub.
   __ bind(&call_stub);
@@ -1574,7 +1626,8 @@
     __ mov(edx, eax);
     __ mov(eax, Immediate(value));
   }
-  __ CallStub(&stub);
+  EmitCallIC(stub.GetCode(), &patch_site);
+
   __ bind(&done);
   context()->Plug(eax);
 }
@@ -1584,7 +1637,7 @@
                                            OverwriteMode mode,
                                            bool left_is_constant_smi,
                                            Smi* value) {
-  Label call_stub, done;
+  NearLabel call_stub, done;
   if (left_is_constant_smi) {
     __ mov(ecx, eax);
     __ mov(eax, Immediate(value));
@@ -1593,8 +1646,8 @@
     __ sub(Operand(eax), Immediate(value));
   }
   __ j(overflow, &call_stub);
-  __ test(eax, Immediate(kSmiTagMask));
-  __ j(zero, &done);
+  JumpPatchSite patch_site(masm_);
+  patch_site.EmitJumpIfSmi(eax, &done);
   __ bind(&call_stub);
   if (left_is_constant_smi) {
@@ -1607,7 +1660,8 @@
   }
   Token::Value op = Token::SUB;
   TypeRecordingBinaryOpStub stub(op, mode);
-  __ CallStub(&stub);
+  EmitCallIC(stub.GetCode(), &patch_site);
+
   __ bind(&done);
   context()->Plug(eax);
 }
@@ -1617,19 +1671,21 @@
                                                Token::Value op,
                                                OverwriteMode mode,
                                                Smi* value) {
-  Label call_stub, smi_case, done;
+  NearLabel call_stub, smi_case, done;
   int shift_value = value->value() & 0x1f;
-  __ test(eax, Immediate(kSmiTagMask));
-  __ j(zero, &smi_case);
+  JumpPatchSite patch_site(masm_);
+  patch_site.EmitJumpIfSmi(eax, &smi_case);
+  // Call stub.
   __ bind(&call_stub);
   __ mov(edx, eax);
   __ mov(eax, Immediate(value));
   TypeRecordingBinaryOpStub stub(op, mode);
-  __ CallStub(&stub);
+  EmitCallIC(stub.GetCode(), &patch_site);
   __ jmp(&done);
+  // Smi case.
   __ bind(&smi_case);
   switch (op) {
     case Token::SHL:
@@ -1679,17 +1735,19 @@
                                              Token::Value op,
                                              OverwriteMode mode,
                                              Smi* value) {
-  Label smi_case, done;
-  __ test(eax, Immediate(kSmiTagMask));
-  __ j(zero, &smi_case);
+  NearLabel smi_case, done;
+  JumpPatchSite patch_site(masm_);
+  patch_site.EmitJumpIfSmi(eax, &smi_case);
+
   // The order of the arguments does not matter for bit-ops with a
   // constant operand.
   __ mov(edx, Immediate(value));
   TypeRecordingBinaryOpStub stub(op, mode);
-  __ CallStub(&stub);
+  EmitCallIC(stub.GetCode(), &patch_site);
   __ jmp(&done);
+  // Smi case.
   __ bind(&smi_case);
   switch (op) {
     case Token::BIT_OR:
@@ -1757,19 +1815,20 @@
   // Do combined smi check of the operands. Left operand is on the
   // stack. Right operand is in eax.
-  Label done, stub_call, smi_case;
+  NearLabel done, smi_case, stub_call;
   __ pop(edx);
   __ mov(ecx, eax);
   __ or_(eax, Operand(edx));
-  __ test(eax, Immediate(kSmiTagMask));
-  __ j(zero, &smi_case);
+  JumpPatchSite patch_site(masm_);
+  patch_site.EmitJumpIfSmi(eax, &smi_case);
   __ bind(&stub_call);
   __ mov(eax, ecx);
   TypeRecordingBinaryOpStub stub(op, mode);
-  __ CallStub(&stub);
+  EmitCallIC(stub.GetCode(), &patch_site);
   __ jmp(&done);
+  // Smi case.
   __ bind(&smi_case);
   __ mov(eax, edx);  // Copy left operand in case of a stub call.
@@ -1848,7 +1907,7 @@
                                      OverwriteMode mode) {
   __ pop(edx);
   TypeRecordingBinaryOpStub stub(op, mode);
-  __ CallStub(&stub);
+  EmitCallIC(stub.GetCode(), NULL);  // NULL signals no inlined smi code.
   context()->Plug(eax);
 }
@@ -3713,8 +3772,9 @@
   }
   // Inline smi case if we are in a loop.
-  NearLabel stub_call;
-  Label done;
+  NearLabel stub_call, done;
+  JumpPatchSite patch_site(masm_);
+
   if (ShouldInlineSmiCase(expr->op())) {
     if (expr->op() == Token::INC) {
       __ add(Operand(eax), Immediate(Smi::FromInt(1)));
@@ -3724,8 +3784,8 @@
     __ j(overflow, &stub_call);
     // We could eliminate this smi check if we split the code at
    // the first smi check before calling ToNumber.
-    __ test(eax, Immediate(kSmiTagMask));
-    __ j(zero, &done);
+    patch_site.EmitJumpIfSmi(eax, &done);
+
    __ bind(&stub_call);
    // Call stub. Undo operation first.
    if (expr->op() == Token::INC) {
@@ -3743,7 +3803,7 @@
   __ mov(eax, Immediate(Smi::FromInt(1)));
   TypeRecordingBinaryOpStub stub(expr->binary_op(),
                                  NO_OVERWRITE);
-  __ CallStub(&stub);
+  EmitCallIC(stub.GetCode(), &patch_site);
   __ bind(&done);
   // Store the value returned in eax.
@@ -4016,21 +4076,22 @@
       }
       bool inline_smi_code = ShouldInlineSmiCase(op);
+      JumpPatchSite patch_site(masm_);
       if (inline_smi_code) {
         NearLabel slow_case;
         __ mov(ecx, Operand(edx));
         __ or_(ecx, Operand(eax));
-        __ test(ecx, Immediate(kSmiTagMask));
-        __ j(not_zero, &slow_case, not_taken);
+        patch_site.EmitJumpIfNotSmi(ecx, &slow_case);
         __ cmp(edx, Operand(eax));
         Split(cc, if_true, if_false, NULL);
         __ bind(&slow_case);
       }
       // Record position and call the compare IC.
+      SetSourcePosition(expr->position());
       Handle<Code> ic = CompareIC::GetUninitialized(op);
-      SetSourcePosition(expr->position());
-      __ call(ic, RelocInfo::CODE_TARGET);
+      EmitCallIC(ic, &patch_site);
+
       PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
       __ test(eax, Operand(eax));
       Split(cc, if_true, if_false, fall_through);
@@ -4134,6 +4195,16 @@
 }
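+// Calls an IC or stub code object.  If a bound patch site is supplied, its
+// patch info is emitted right after the call so the inlined smi check can be
+// patched; otherwise a nop is emitted to signal that there is no inlined smi
+// code.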
+void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
+  __ call(ic, RelocInfo::CODE_TARGET);
+  if (patch_site != NULL && patch_site->is_bound()) {
+    patch_site->EmitPatchInfo();
+  } else {
+    __ nop();  // Signals no inlined code.
+  }
+}
+
+
 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
   ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
   __ mov(Operand(ebp, frame_offset), value);