Chromium Code Reviews
| Index: src/ia32/full-codegen-ia32.cc |
| =================================================================== |
| --- src/ia32/full-codegen-ia32.cc (revision 5952) |
| +++ src/ia32/full-codegen-ia32.cc (working copy) |
| @@ -1548,16 +1548,40 @@ |
| } |
| +class JumpPatchSite BASE_EMBEDDED { |
| + public: |
| + JumpPatchSite(MacroAssembler* masm, NearLabel* target, Condition cc) |
| + : masm_(masm), jump_target_(target), cc_(cc) { } |
| + |
| + void EmitJump() { |
| + masm_->bind(&patch_site_); |
|
Vitaly Repeshko
2010/12/09 15:59:01
In debug mode in ~JumpPatchSite we could assert th… [comment truncated in extraction]
fschneider
2010/12/10 13:19:24
Done.
|
| + masm_->jmp(jump_target_); |
| + } |
| + |
| + void EmitPatchInfo() { |
| + int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_); |
| + __ test(eax, Immediate(delta_to_patch_site << 16 | cc_)); |
|
Vitaly Repeshko
2010/12/09 15:59:01
For smi checks the condition is always not_zero. W… [comment truncated in extraction]
fschneider
2010/12/10 13:19:24
Done.
|
| + } |
| + |
| + private: |
| + MacroAssembler* masm_; |
| + Label patch_site_; |
| + NearLabel* jump_target_; |
| + Condition cc_; |
| +}; |
| + |
| + |
| void FullCodeGenerator::EmitConstantSmiAdd(Expression* expr, |
| OverwriteMode mode, |
| bool left_is_constant_smi, |
| Smi* value) { |
| - NearLabel call_stub; |
| - Label done; |
| + NearLabel call_stub, done; |
| __ add(Operand(eax), Immediate(value)); |
| __ j(overflow, &call_stub); |
| __ test(eax, Immediate(kSmiTagMask)); |
| - __ j(zero, &done); |
| + JumpPatchSite patch_site(masm_, &call_stub, not_zero); |
| + patch_site.EmitJump(); |
| + __ jmp(&done); |
| // Undo the optimistic add operation and call the shared stub. |
| __ bind(&call_stub); |
| @@ -1571,6 +1595,8 @@ |
| __ mov(eax, Immediate(value)); |
| } |
| __ CallStub(&stub); |
|
Vitaly Repeshko
2010/12/09 15:59:01
Consider adding a function like EmitCallIC that ta… [comment truncated in extraction]
William Hesse
2010/12/09 16:27:58
I'm not sure this is a good idea, since the existi… [comment truncated in extraction]
Kevin Millikin (Chromium)
2010/12/10 06:41:41
I like Vitaly's idea because it's safer. I think… [comment truncated in extraction]
fschneider
2010/12/10 13:19:24
Done.
|
| + patch_site.EmitPatchInfo(); |
| + |
| __ bind(&done); |
| context()->Plug(eax); |
| } |
| @@ -1580,7 +1606,7 @@ |
| OverwriteMode mode, |
| bool left_is_constant_smi, |
| Smi* value) { |
| - Label call_stub, done; |
| + NearLabel call_stub, done; |
| if (left_is_constant_smi) { |
| __ mov(ecx, eax); |
| __ mov(eax, Immediate(value)); |
| @@ -1590,7 +1616,9 @@ |
| } |
| __ j(overflow, &call_stub); |
| __ test(eax, Immediate(kSmiTagMask)); |
| - __ j(zero, &done); |
| + JumpPatchSite patch_site(masm_, &call_stub, not_zero); |
| + patch_site.EmitJump(); |
| + __ jmp(&done); |
| __ bind(&call_stub); |
| if (left_is_constant_smi) { |
| @@ -1604,6 +1632,8 @@ |
| Token::Value op = Token::SUB; |
| TypeRecordingBinaryOpStub stub(op, mode); |
| __ CallStub(&stub); |
| + patch_site.EmitPatchInfo(); |
| + |
| __ bind(&done); |
| context()->Plug(eax); |
| } |
| @@ -1613,20 +1643,15 @@ |
| Token::Value op, |
| OverwriteMode mode, |
| Smi* value) { |
| - Label call_stub, smi_case, done; |
| + NearLabel call_stub, done; |
| int shift_value = value->value() & 0x1f; |
| __ test(eax, Immediate(kSmiTagMask)); |
| - __ j(zero, &smi_case); |
| + // Patch site. |
| + JumpPatchSite patch_site(masm_, &call_stub, not_zero); |
| + patch_site.EmitJump(); |
| - __ bind(&call_stub); |
| - __ mov(edx, eax); |
| - __ mov(eax, Immediate(value)); |
| - TypeRecordingBinaryOpStub stub(op, mode); |
| - __ CallStub(&stub); |
| - __ jmp(&done); |
| - |
| - __ bind(&smi_case); |
| + // Smi case. |
| switch (op) { |
| case Token::SHL: |
| if (shift_value != 0) { |
| @@ -1665,7 +1690,16 @@ |
| default: |
| UNREACHABLE(); |
| } |
| + __ jmp(&done); |
| + // Call stub. |
| + __ bind(&call_stub); |
| + __ mov(edx, eax); |
| + __ mov(eax, Immediate(value)); |
| + TypeRecordingBinaryOpStub stub(op, mode); |
| + __ CallStub(&stub); |
| + patch_site.EmitPatchInfo(); |
| + |
| __ bind(&done); |
| context()->Plug(eax); |
| } |
| @@ -1675,18 +1709,14 @@ |
| Token::Value op, |
| OverwriteMode mode, |
| Smi* value) { |
| - Label smi_case, done; |
| + NearLabel call_stub, done; |
| __ test(eax, Immediate(kSmiTagMask)); |
| - __ j(zero, &smi_case); |
| + // Patch site. The first invocation of the stub will patch the jmp with |
| + // the required conditional jump. |
| + JumpPatchSite patch_site(masm_, &call_stub, not_zero); |
| + patch_site.EmitJump(); |
| - // The order of the arguments does not matter for bit-ops with a |
| - // constant operand. |
| - __ mov(edx, Immediate(value)); |
| - TypeRecordingBinaryOpStub stub(op, mode); |
| - __ CallStub(&stub); |
| - __ jmp(&done); |
| - |
| - __ bind(&smi_case); |
| + // Smi case. |
| switch (op) { |
| case Token::BIT_OR: |
| __ or_(Operand(eax), Immediate(value)); |
| @@ -1700,7 +1730,16 @@ |
| default: |
| UNREACHABLE(); |
| } |
| + __ jmp(&done); |
| + // The order of the arguments does not matter for bit-ops with a |
| + // constant operand. |
| + __ bind(&call_stub); |
| + __ mov(edx, Immediate(value)); |
| + TypeRecordingBinaryOpStub stub(op, mode); |
| + __ CallStub(&stub); |
| + patch_site.EmitPatchInfo(); |
| + |
| __ bind(&done); |
| context()->Plug(eax); |
| } |
| @@ -1753,20 +1792,15 @@ |
| // Do combined smi check of the operands. Left operand is on the |
| // stack. Right operand is in eax. |
| - Label done, stub_call, smi_case; |
| + NearLabel done, stub_call; |
| __ pop(edx); |
| __ mov(ecx, eax); |
| __ or_(eax, Operand(edx)); |
| __ test(eax, Immediate(kSmiTagMask)); |
| - __ j(zero, &smi_case); |
| + JumpPatchSite patch_site(masm_, &stub_call, not_zero); |
| + patch_site.EmitJump(); |
| - __ bind(&stub_call); |
| - __ mov(eax, ecx); |
| - TypeRecordingBinaryOpStub stub(op, mode); |
| - __ CallStub(&stub); |
| - __ jmp(&done); |
| - |
| - __ bind(&smi_case); |
| + // Smi case. |
| __ mov(eax, edx); // Copy left operand in case of a stub call. |
| switch (op) { |
| @@ -1834,7 +1868,14 @@ |
| default: |
| UNREACHABLE(); |
| } |
| + __ jmp(&done); |
| + __ bind(&stub_call); |
| + __ mov(eax, ecx); |
| + TypeRecordingBinaryOpStub stub(op, mode); |
| + __ CallStub(&stub); |
| + patch_site.EmitPatchInfo(); |
| + |
| __ bind(&done); |
| context()->Plug(eax); |
| } |
| @@ -1845,6 +1886,7 @@ |
| __ pop(edx); |
| TypeRecordingBinaryOpStub stub(op, mode); |
| __ CallStub(&stub); |
| + __ nop(); // Signals no inlined smi code. |
| context()->Plug(eax); |
| } |
| @@ -3709,6 +3751,7 @@ |
| // Inline smi case if we are in a loop. |
| NearLabel stub_call; |
| + JumpPatchSite patch_site(masm_, &stub_call, not_zero); |
| Label done; |
| if (ShouldInlineSmiCase(expr->op())) { |
| if (expr->op() == Token::INC) { |
| @@ -3720,7 +3763,9 @@ |
| // We could eliminate this smi check if we split the code at |
| // the first smi check before calling ToNumber. |
| __ test(eax, Immediate(kSmiTagMask)); |
| - __ j(zero, &done); |
| + patch_site.EmitJump(); |
| + __ jmp(&done); |
|
William Hesse
2010/12/09 16:27:58
Too bad that you have to put an extra jump here, t… [comment truncated in extraction]
|
| + |
| __ bind(&stub_call); |
| // Call stub. Undo operation first. |
| if (expr->op() == Token::INC) { |
| @@ -3739,8 +3784,14 @@ |
| TypeRecordingBinaryOpStub stub(expr->binary_op(), |
| NO_OVERWRITE); |
| __ CallStub(&stub); |
| + // Indicate here if smi code was inlined. |
| + if (ShouldInlineSmiCase(expr->op())) { |
| + patch_site.EmitPatchInfo(); |
| + } else { |
| + __ nop(); // Signals no inlined code. |
| + } |
| + |
| __ bind(&done); |
| - |
| // Store the value returned in eax. |
| switch (assign_type) { |
| case VARIABLE: |