OLD | NEW |
1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 45 matching lines...)
56 : masm_(masm) { | 56 : masm_(masm) { |
57 #ifdef DEBUG | 57 #ifdef DEBUG |
58 info_emitted_ = false; | 58 info_emitted_ = false; |
59 #endif | 59 #endif |
60 } | 60 } |
61 | 61 |
62 ~JumpPatchSite() { | 62 ~JumpPatchSite() { |
63 ASSERT(patch_site_.is_bound() == info_emitted_); | 63 ASSERT(patch_site_.is_bound() == info_emitted_); |
64 } | 64 } |
65 | 65 |
66 void EmitJumpIfNotSmi(Register reg, NearLabel* target) { | 66 void EmitJumpIfNotSmi(Register reg, |
| 67 Label* target, |
| 68 Label::Distance near_jump = Label::kFar) { |
67 __ testb(reg, Immediate(kSmiTagMask)); | 69 __ testb(reg, Immediate(kSmiTagMask)); |
68 EmitJump(not_carry, target); // Always taken before patched. | 70 EmitJump(not_carry, target, near_jump); // Always taken before patched. |
69 } | 71 } |
70 | 72 |
71 void EmitJumpIfSmi(Register reg, NearLabel* target) { | 73 void EmitJumpIfSmi(Register reg, |
| 74 Label* target, |
| 75 Label::Distance near_jump = Label::kFar) { |
72 __ testb(reg, Immediate(kSmiTagMask)); | 76 __ testb(reg, Immediate(kSmiTagMask)); |
73 EmitJump(carry, target); // Never taken before patched. | 77 EmitJump(carry, target, near_jump); // Never taken before patched. |
74 } | 78 } |
75 | 79 |
76 void EmitPatchInfo() { | 80 void EmitPatchInfo() { |
77 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_); | 81 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_); |
78 ASSERT(is_int8(delta_to_patch_site)); | 82 ASSERT(is_int8(delta_to_patch_site)); |
79 __ testl(rax, Immediate(delta_to_patch_site)); | 83 __ testl(rax, Immediate(delta_to_patch_site)); |
80 #ifdef DEBUG | 84 #ifdef DEBUG |
81 info_emitted_ = true; | 85 info_emitted_ = true; |
82 #endif | 86 #endif |
83 } | 87 } |
84 | 88 |
85 bool is_bound() const { return patch_site_.is_bound(); } | 89 bool is_bound() const { return patch_site_.is_bound(); } |
86 | 90 |
87 private: | 91 private: |
88 // jc will be patched with jz, jnc will become jnz. | 92 // jc will be patched with jz, jnc will become jnz. |
89 void EmitJump(Condition cc, NearLabel* target) { | 93 void EmitJump(Condition cc, Label* target, Label::Distance near_jump) { |
90 ASSERT(!patch_site_.is_bound() && !info_emitted_); | 94 ASSERT(!patch_site_.is_bound() && !info_emitted_); |
91 ASSERT(cc == carry || cc == not_carry); | 95 ASSERT(cc == carry || cc == not_carry); |
92 __ bind(&patch_site_); | 96 __ bind(&patch_site_); |
93 __ j(cc, target); | 97 __ j(cc, target, near_jump); |
94 } | 98 } |
95 | 99 |
96 MacroAssembler* masm_; | 100 MacroAssembler* masm_; |
97 Label patch_site_; | 101 Label patch_site_; |
98 #ifdef DEBUG | 102 #ifdef DEBUG |
99 bool info_emitted_; | 103 bool info_emitted_; |
100 #endif | 104 #endif |
101 }; | 105 }; |
102 | 106 |
103 | 107 |
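
Aside on how the patch site works: `testb reg, Immediate(kSmiTagMask)` always clears the carry flag, so the `jc` emitted by EmitJumpIfSmi is never taken and the `jnc` from EmitJumpIfNotSmi is always taken until the IC patches them to `jz`/`jnz`; after patching they become real smi checks on the zero flag. EmitPatchInfo() encodes the 8-bit distance back to the patch site in the immediate of a `testl rax, imm`, which is how the patching code locates the jump. A rough usage sketch assembled from the call sites later in this diff (assembler fragment, not standalone code; `masm_`, `op`, and `expr` are the surrounding FullCodeGenerator state):

```cpp
// Sketch only: an inline smi fast path guarded by a patchable jump.
Label slow_case;
JumpPatchSite patch_site(masm_);
__ movq(rcx, rdx);                                  // left operand
__ or_(rcx, rax);                                   // fold in the right operand's tag bit
patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);
// ... smi-only fast path ...
__ bind(&slow_case);
Handle<Code> ic = CompareIC::GetUninitialized(op);  // as in the compare code below
EmitCallIC(ic, &patch_site, expr->id());            // assumed to emit the patch info near the call
```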
(...skipping 696 matching lines...)
800 next_test.Unuse(); | 804 next_test.Unuse(); |
801 | 805 |
802 // Compile the label expression. | 806 // Compile the label expression. |
803 VisitForAccumulatorValue(clause->label()); | 807 VisitForAccumulatorValue(clause->label()); |
804 | 808 |
805 // Perform the comparison as if via '==='. | 809 // Perform the comparison as if via '==='. |
806 __ movq(rdx, Operand(rsp, 0)); // Switch value. | 810 __ movq(rdx, Operand(rsp, 0)); // Switch value. |
807 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT); | 811 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT); |
808 JumpPatchSite patch_site(masm_); | 812 JumpPatchSite patch_site(masm_); |
809 if (inline_smi_code) { | 813 if (inline_smi_code) { |
810 NearLabel slow_case; | 814 Label slow_case; |
811 __ movq(rcx, rdx); | 815 __ movq(rcx, rdx); |
812 __ or_(rcx, rax); | 816 __ or_(rcx, rax); |
813 patch_site.EmitJumpIfNotSmi(rcx, &slow_case); | 817 patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear); |
814 | 818 |
815 __ cmpq(rdx, rax); | 819 __ cmpq(rdx, rax); |
816 __ j(not_equal, &next_test); | 820 __ j(not_equal, &next_test); |
817 __ Drop(1); // Switch value is no longer needed. | 821 __ Drop(1); // Switch value is no longer needed. |
818 __ jmp(clause->body_target()); | 822 __ jmp(clause->body_target()); |
819 __ bind(&slow_case); | 823 __ bind(&slow_case); |
820 } | 824 } |
821 | 825 |
822 // Record position before stub call for type feedback. | 826 // Record position before stub call for type feedback. |
823 SetSourcePosition(clause->position()); | 827 SetSourcePosition(clause->position()); |
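
Aside: the `movq`/`or_`/`EmitJumpIfNotSmi` sequence above is a combined smi check. A smi's tag bit is clear, so OR-ing the two operands and testing against kSmiTagMask proves with a single branch that both are smis; only then is the raw `cmpq` a valid '===' comparison. A self-contained illustration of the predicate, assuming the usual one-bit smi tag (tag value 0, mask 1):

```cpp
#include <cstdint>

// Assumed to mirror V8's kSmiTagMask for a one-bit smi tag.
constexpr intptr_t kSmiTagMaskSketch = 1;

// True iff both tagged words are smis: a set tag bit in either operand
// survives the OR and trips the mask test.
bool BothAreSmis(intptr_t a, intptr_t b) {
  return ((a | b) & kSmiTagMaskSketch) == 0;
}
```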
(...skipping 840 matching lines...)
1664 | 1668 |
1665 | 1669 |
1666 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, | 1670 void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr, |
1667 Token::Value op, | 1671 Token::Value op, |
1668 OverwriteMode mode, | 1672 OverwriteMode mode, |
1669 Expression* left, | 1673 Expression* left, |
1670 Expression* right) { | 1674 Expression* right) { |
1671 // Do combined smi check of the operands. Left operand is on the | 1675 // Do combined smi check of the operands. Left operand is on the |
1672 // stack (popped into rdx). Right operand is in rax but moved into | 1676 // stack (popped into rdx). Right operand is in rax but moved into |
1673 // rcx to make the shifts easier. | 1677 // rcx to make the shifts easier. |
1674 NearLabel stub_call, smi_case; | 1678 Label done, stub_call, smi_case; |
1675 Label done; | |
1676 __ pop(rdx); | 1679 __ pop(rdx); |
1677 __ movq(rcx, rax); | 1680 __ movq(rcx, rax); |
1678 __ or_(rax, rdx); | 1681 __ or_(rax, rdx); |
1679 JumpPatchSite patch_site(masm_); | 1682 JumpPatchSite patch_site(masm_); |
1680 patch_site.EmitJumpIfSmi(rax, &smi_case); | 1683 patch_site.EmitJumpIfSmi(rax, &smi_case, Label::kNear); |
1681 | 1684 |
1682 __ bind(&stub_call); | 1685 __ bind(&stub_call); |
1683 __ movq(rax, rcx); | 1686 __ movq(rax, rcx); |
1684 TypeRecordingBinaryOpStub stub(op, mode); | 1687 TypeRecordingBinaryOpStub stub(op, mode); |
1685 EmitCallIC(stub.GetCode(), &patch_site, expr->id()); | 1688 EmitCallIC(stub.GetCode(), &patch_site, expr->id()); |
1686 __ jmp(&done, Label::kNear); | 1689 __ jmp(&done, Label::kNear); |
1687 | 1690 |
1688 __ bind(&smi_case); | 1691 __ bind(&smi_case); |
1689 switch (op) { | 1692 switch (op) { |
1690 case Token::SAR: | 1693 case Token::SAR: |
(...skipping 2140 matching lines...)
3831 __ movq(Operand(rsp, kPointerSize), rax); | 3834 __ movq(Operand(rsp, kPointerSize), rax); |
3832 break; | 3835 break; |
3833 case KEYED_PROPERTY: | 3836 case KEYED_PROPERTY: |
3834 __ movq(Operand(rsp, 2 * kPointerSize), rax); | 3837 __ movq(Operand(rsp, 2 * kPointerSize), rax); |
3835 break; | 3838 break; |
3836 } | 3839 } |
3837 } | 3840 } |
3838 } | 3841 } |
3839 | 3842 |
3840 // Inline smi case if we are in a loop. | 3843 // Inline smi case if we are in a loop. |
3841 NearLabel done; | 3844 Label done, stub_call; |
3842 Label stub_call; | |
3843 JumpPatchSite patch_site(masm_); | 3845 JumpPatchSite patch_site(masm_); |
3844 | 3846 |
3845 if (ShouldInlineSmiCase(expr->op())) { | 3847 if (ShouldInlineSmiCase(expr->op())) { |
3846 if (expr->op() == Token::INC) { | 3848 if (expr->op() == Token::INC) { |
3847 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); | 3849 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); |
3848 } else { | 3850 } else { |
3849 __ SmiSubConstant(rax, rax, Smi::FromInt(1)); | 3851 __ SmiSubConstant(rax, rax, Smi::FromInt(1)); |
3850 } | 3852 } |
3851 __ j(overflow, &stub_call, Label::kNear); | 3853 __ j(overflow, &stub_call, Label::kNear); |
3852 // We could eliminate this smi check if we split the code at | 3854 // We could eliminate this smi check if we split the code at |
3853 // the first smi check before calling ToNumber. | 3855 // the first smi check before calling ToNumber. |
3854 patch_site.EmitJumpIfSmi(rax, &done); | 3856 patch_site.EmitJumpIfSmi(rax, &done, Label::kNear); |
3855 | 3857 |
3856 __ bind(&stub_call); | 3858 __ bind(&stub_call); |
3857 // Call stub. Undo operation first. | 3859 // Call stub. Undo operation first. |
3858 if (expr->op() == Token::INC) { | 3860 if (expr->op() == Token::INC) { |
3859 __ SmiSubConstant(rax, rax, Smi::FromInt(1)); | 3861 __ SmiSubConstant(rax, rax, Smi::FromInt(1)); |
3860 } else { | 3862 } else { |
3861 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); | 3863 __ SmiAddConstant(rax, rax, Smi::FromInt(1)); |
3862 } | 3864 } |
3863 } | 3865 } |
3864 | 3866 |
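
Aside on the fast path above: the code speculatively applies the smi increment/decrement, branches to the stub on overflow, and otherwise relies on the (patched) smi check to bail out for non-smi operands; the slow path undoes the speculative operation first so the stub always sees the original value. A plain-C++ model of that control flow (untagged values and a hypothetical helper for illustration, not V8 code; the generated code works on tagged smis via SmiAddConstant/SmiSubConstant, and the overflow test is hoisted here only to keep the C++ well-defined):

```cpp
#include <cstdint>
#include <limits>

// Returns true and writes the incremented value when the fast path applies;
// returns false when the caller must fall back to the stub with the
// *original* value, mirroring the undo before the stub call above.
bool TryIncrementFastPath(int32_t value, bool operand_is_smi, int32_t* result) {
  if (value == std::numeric_limits<int32_t>::max()) {
    return false;                   // 'j(overflow, &stub_call)' in the generated code
  }
  int32_t speculative = value + 1;  // SmiAddConstant(rax, rax, Smi::FromInt(1))
  if (!operand_is_smi) {
    return false;                   // patched EmitJumpIfSmi falls through: undo, then stub
  }
  *result = speculative;            // jump to 'done'
  return true;
}
```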
(...skipping 264 matching lines...)
4129 break; | 4131 break; |
4130 case Token::IN: | 4132 case Token::IN: |
4131 case Token::INSTANCEOF: | 4133 case Token::INSTANCEOF: |
4132 default: | 4134 default: |
4133 UNREACHABLE(); | 4135 UNREACHABLE(); |
4134 } | 4136 } |
4135 | 4137 |
4136 bool inline_smi_code = ShouldInlineSmiCase(op); | 4138 bool inline_smi_code = ShouldInlineSmiCase(op); |
4137 JumpPatchSite patch_site(masm_); | 4139 JumpPatchSite patch_site(masm_); |
4138 if (inline_smi_code) { | 4140 if (inline_smi_code) { |
4139 NearLabel slow_case; | 4141 Label slow_case; |
4140 __ movq(rcx, rdx); | 4142 __ movq(rcx, rdx); |
4141 __ or_(rcx, rax); | 4143 __ or_(rcx, rax); |
4142 patch_site.EmitJumpIfNotSmi(rcx, &slow_case); | 4144 patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear); |
4143 __ cmpq(rdx, rax); | 4145 __ cmpq(rdx, rax); |
4144 Split(cc, if_true, if_false, NULL); | 4146 Split(cc, if_true, if_false, NULL); |
4145 __ bind(&slow_case); | 4147 __ bind(&slow_case); |
4146 } | 4148 } |
4147 | 4149 |
4148 // Record position and call the compare IC. | 4150 // Record position and call the compare IC. |
4149 SetSourcePosition(expr->position()); | 4151 SetSourcePosition(expr->position()); |
4150 Handle<Code> ic = CompareIC::GetUninitialized(op); | 4152 Handle<Code> ic = CompareIC::GetUninitialized(op); |
4151 EmitCallIC(ic, &patch_site, expr->id()); | 4153 EmitCallIC(ic, &patch_site, expr->id()); |
4152 | 4154 |
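
General note on the pattern in this CL: each NearLabel becomes a plain Label plus an explicit Label::Distance argument, with Label::kFar as the default, so call sites that previously relied on NearLabel for the short (rel8) jump encoding now request it explicitly with Label::kNear. A before/after fragment using only forms visible in this diff (assembler fragment, not standalone code):

```cpp
// Before this CL: NearLabel implies a short jump.
//   NearLabel slow_case;
//   patch_site.EmitJumpIfNotSmi(rcx, &slow_case);
// After this CL: the distance is an explicit argument, defaulting to far.
Label slow_case;
patch_site.EmitJumpIfNotSmi(rcx, &slow_case, Label::kNear);  // short encoding requested
__ j(not_equal, &next_test);                                 // far-capable encoding by default
```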
(...skipping 152 matching lines...)
4305 __ ret(0); | 4307 __ ret(0); |
4306 } | 4308 } |
4307 | 4309 |
4308 | 4310 |
4309 #undef __ | 4311 #undef __ |
4310 | 4312 |
4311 | 4313 |
4312 } } // namespace v8::internal | 4314 } } // namespace v8::internal |
4313 | 4315 |
4314 #endif // V8_TARGET_ARCH_X64 | 4316 #endif // V8_TARGET_ARCH_X64 |