Index: src/ia32/full-codegen-ia32.cc
diff --git a/src/ia32/full-codegen-ia32.cc b/src/ia32/full-codegen-ia32.cc
index 0b2d42d191c7fb04fccffb3867571f086e7c715a..7dba42466c173fea4bd32cbb9ba4e648be9ee75c 100644
--- a/src/ia32/full-codegen-ia32.cc
+++ b/src/ia32/full-codegen-ia32.cc
@@ -62,14 +62,18 @@ class JumpPatchSite BASE_EMBEDDED {
     ASSERT(patch_site_.is_bound() == info_emitted_);
   }
 
-  void EmitJumpIfNotSmi(Register reg, NearLabel* target) {
+  void EmitJumpIfNotSmi(Register reg,
+                        Label* target,
+                        Label::Distance near = Label::kFar) {
     __ test(reg, Immediate(kSmiTagMask));
-    EmitJump(not_carry, target);  // Always taken before patched.
+    EmitJump(not_carry, target, near);  // Always taken before patched.
   }
 
-  void EmitJumpIfSmi(Register reg, NearLabel* target) {
+  void EmitJumpIfSmi(Register reg,
+                     Label* target,
+                     Label::Distance near = Label::kFar) {
     __ test(reg, Immediate(kSmiTagMask));
-    EmitJump(carry, target);  // Never taken before patched.
+    EmitJump(carry, target, near);  // Never taken before patched.
   }
 
   void EmitPatchInfo() {
@@ -85,11 +89,11 @@ class JumpPatchSite BASE_EMBEDDED {
 
  private:
   // jc will be patched with jz, jnc will become jnz.
-  void EmitJump(Condition cc, NearLabel* target) {
+  void EmitJump(Condition cc, Label* target, Label::Distance near) {
     ASSERT(!patch_site_.is_bound() && !info_emitted_);
     ASSERT(cc == carry || cc == not_carry);
     __ bind(&patch_site_);
-    __ j(cc, target);
+    __ j(cc, target, near);
   }
 
   MacroAssembler* masm_;
@@ -798,10 +802,10 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
     bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
     JumpPatchSite patch_site(masm_);
     if (inline_smi_code) {
-      NearLabel slow_case;
+      Label slow_case;
       __ mov(ecx, edx);
       __ or_(ecx, Operand(eax));
-      patch_site.EmitJumpIfNotSmi(ecx, &slow_case);
+      patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
       __ cmp(edx, Operand(eax));
       __ j(not_equal, &next_test);
@@ -1657,13 +1661,12 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
                                               Expression* right) {
   // Do combined smi check of the operands. Left operand is on the
   // stack. Right operand is in eax.
-  NearLabel smi_case;
-  Label done, stub_call;
+  Label smi_case, done, stub_call;
   __ pop(edx);
   __ mov(ecx, eax);
   __ or_(eax, Operand(edx));
   JumpPatchSite patch_site(masm_);
-  patch_site.EmitJumpIfSmi(eax, &smi_case);
+  patch_site.EmitJumpIfSmi(eax, &smi_case, Label::kNear);
 
   __ bind(&stub_call);
   __ mov(eax, ecx);
@@ -3860,8 +3863,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
   }
 
   // Inline smi case if we are in a loop.
-  NearLabel done;
-  Label stub_call;
+  Label done, stub_call;
   JumpPatchSite patch_site(masm_);
   if (ShouldInlineSmiCase(expr->op())) {
@@ -3873,7 +3875,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
     __ j(overflow, &stub_call, Label::kNear);
     // We could eliminate this smi check if we split the code at
     // the first smi check before calling ToNumber.
-    patch_site.EmitJumpIfSmi(eax, &done);
+    patch_site.EmitJumpIfSmi(eax, &done, Label::kNear);
 
     __ bind(&stub_call);
     // Call stub. Undo operation first.
@@ -4156,10 +4158,10 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
       bool inline_smi_code = ShouldInlineSmiCase(op);
       JumpPatchSite patch_site(masm_);
       if (inline_smi_code) {
-        NearLabel slow_case;
+        Label slow_case;
         __ mov(ecx, Operand(edx));
         __ or_(ecx, Operand(eax));
-        patch_site.EmitJumpIfNotSmi(ecx, &slow_case);
+        patch_site.EmitJumpIfNotSmi(ecx, &slow_case, Label::kNear);
         __ cmp(edx, Operand(eax));
         Split(cc, if_true, if_false, NULL);
         __ bind(&slow_case);