Chromium Code Reviews

Side by Side Diff: src/ia32/full-codegen-ia32.cc

Issue 6062002: Merge 6006:6095 from bleeding_edge to experimental/gc branch. (Closed) Base URL: http://v8.googlecode.com/svn/branches/experimental/gc/
Patch Set: Created 10 years ago
1 // Copyright 2010 the V8 project authors. All rights reserved. 1 // Copyright 2010 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 23 matching lines...)
34 #include "compiler.h" 34 #include "compiler.h"
35 #include "debug.h" 35 #include "debug.h"
36 #include "full-codegen.h" 36 #include "full-codegen.h"
37 #include "parser.h" 37 #include "parser.h"
38 #include "scopes.h" 38 #include "scopes.h"
39 #include "stub-cache.h" 39 #include "stub-cache.h"
40 40
41 namespace v8 { 41 namespace v8 {
42 namespace internal { 42 namespace internal {
43 43
44
44 #define __ ACCESS_MASM(masm_) 45 #define __ ACCESS_MASM(masm_)
45 46
47
48 class JumpPatchSite BASE_EMBEDDED {
49 public:
50 explicit JumpPatchSite(MacroAssembler* masm)
51 : masm_(masm) {
52 #ifdef DEBUG
53 info_emitted_ = false;
54 #endif
55 }
56
57 ~JumpPatchSite() {
58 ASSERT(patch_site_.is_bound() == info_emitted_);
59 }
60
61 void EmitJumpIfNotSmi(Register reg, NearLabel* target) {
62 __ test(reg, Immediate(kSmiTagMask));
63 EmitJump(not_carry, target); // Always taken before patched.
64 }
65
66 void EmitJumpIfSmi(Register reg, NearLabel* target) {
67 __ test(reg, Immediate(kSmiTagMask));
68 EmitJump(carry, target); // Never taken before patched.
69 }
70
71 void EmitPatchInfo() {
72 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
73 ASSERT(is_int8(delta_to_patch_site));
74 __ test(eax, Immediate(delta_to_patch_site));
75 #ifdef DEBUG
76 info_emitted_ = true;
77 #endif
78 }
79
80 bool is_bound() const { return patch_site_.is_bound(); }
81
82 private:
83 // jc will be patched with jz, jnc will become jnz.
84 void EmitJump(Condition cc, NearLabel* target) {
85 ASSERT(!patch_site_.is_bound() && !info_emitted_);
86 ASSERT(cc == carry || cc == not_carry);
87 __ bind(&patch_site_);
88 __ j(cc, target);
89 }
90
91 MacroAssembler* masm_;
92 Label patch_site_;
93 #ifdef DEBUG
94 bool info_emitted_;
95 #endif
96 };
97
98
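Note on the mechanism (explanatory, not part of the patch): JumpPatchSite records where the smi-check jump was emitted and, through EmitPatchInfo(), leaves a "test eax, <delta>" immediately after the associated IC call, where <delta> is the byte distance back to that jump. Because "test reg, kSmiTagMask" always clears the carry flag, the jnc from EmitJumpIfNotSmi is always taken and the jc from EmitJumpIfSmi is never taken until the site is patched; once the IC has gathered type feedback, the jump condition is rewritten so the inlined smi code actually runs. The rewriting itself happens on the IC side and is not shown in this file. Below is a minimal sketch of how a patcher could consume the emitted info, assuming the usual ia32 encodings (test eax, imm32 = 0xA9; short jc/jnc/jz/jnz = 0x72/0x73/0x74/0x75); the function name and signature are hypothetical:

    // Hypothetical sketch, not code from this CL. 'test_address' points at the
    // "test eax, <delta>" emitted by JumpPatchSite::EmitPatchInfo() right after
    // the IC call.
    static void PatchInlinedSmiCheck(uint8_t* test_address) {
      ASSERT(*test_address == 0xA9);  // Opcode of "test eax, imm32".
      // The delta was asserted to fit in int8, so the low byte of the
      // little-endian imm32 holds it.
      int8_t delta = *reinterpret_cast<int8_t*>(test_address + 1);
      // The short conditional jump emitted by EmitJump() starts 'delta' bytes
      // earlier: a condition opcode byte followed by a rel8 offset.
      uint8_t* jump_opcode = test_address - delta;
      switch (*jump_opcode) {
        case 0x72: *jump_opcode = 0x74; break;  // jc  -> jz  (EmitJumpIfSmi)
        case 0x73: *jump_opcode = 0x75; break;  // jnc -> jnz (EmitJumpIfNotSmi)
        default: UNREACHABLE();
      }
    }
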
46 // Generate code for a JS function. On entry to the function the receiver 99 // Generate code for a JS function. On entry to the function the receiver
47 // and arguments have been pushed on the stack left to right, with the 100 // and arguments have been pushed on the stack left to right, with the
48 // return address on top of them. The actual argument count matches the 101 // return address on top of them. The actual argument count matches the
49 // formal parameter count expected by the function. 102 // formal parameter count expected by the function.
50 // 103 //
51 // The live registers are: 104 // The live registers are:
52 // o edi: the JS function object being called (ie, ourselves) 105 // o edi: the JS function object being called (ie, ourselves)
53 // o esi: our context 106 // o esi: our context
54 // o ebp: our caller's frame pointer 107 // o ebp: our caller's frame pointer
55 // o esp: stack pointer (pointing to return address) 108 // o esp: stack pointer (pointing to return address)
(...skipping 137 matching lines...)
193 } 246 }
194 247
195 { Comment cmnt(masm_, "[ return <undefined>;"); 248 { Comment cmnt(masm_, "[ return <undefined>;");
196 // Emit a 'return undefined' in case control fell off the end of the body. 249 // Emit a 'return undefined' in case control fell off the end of the body.
197 __ mov(eax, Factory::undefined_value()); 250 __ mov(eax, Factory::undefined_value());
198 EmitReturnSequence(); 251 EmitReturnSequence();
199 } 252 }
200 } 253 }
201 254
202 255
256 void FullCodeGenerator::ClearAccumulator() {
257 __ Set(eax, Immediate(Smi::FromInt(0)));
258 }
259
260
203 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) { 261 void FullCodeGenerator::EmitStackCheck(IterationStatement* stmt) {
204 Comment cmnt(masm_, "[ Stack check"); 262 Comment cmnt(masm_, "[ Stack check");
205 NearLabel ok; 263 NearLabel ok;
206 ExternalReference stack_limit = ExternalReference::address_of_stack_limit(); 264 ExternalReference stack_limit = ExternalReference::address_of_stack_limit();
207 __ cmp(esp, Operand::StaticVariable(stack_limit)); 265 __ cmp(esp, Operand::StaticVariable(stack_limit));
208 __ j(above_equal, &ok, taken); 266 __ j(above_equal, &ok, taken);
209 StackCheckStub stub; 267 StackCheckStub stub;
210 __ CallStub(&stub); 268 __ CallStub(&stub);
211 __ bind(&ok); 269 __ bind(&ok);
212 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS); 270 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
(...skipping 474 matching lines...)
687 __ CallRuntime(Runtime::kDeclareGlobals, 3); 745 __ CallRuntime(Runtime::kDeclareGlobals, 3);
688 // Return value is ignored. 746 // Return value is ignored.
689 } 747 }
690 748
691 749
692 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) { 750 void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
693 Comment cmnt(masm_, "[ SwitchStatement"); 751 Comment cmnt(masm_, "[ SwitchStatement");
694 Breakable nested_statement(this, stmt); 752 Breakable nested_statement(this, stmt);
695 SetStatementPosition(stmt); 753 SetStatementPosition(stmt);
696 754
697 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
698
699 // Keep the switch value on the stack until a case matches. 755 // Keep the switch value on the stack until a case matches.
700 VisitForStackValue(stmt->tag()); 756 VisitForStackValue(stmt->tag());
757 PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
701 758
702 ZoneList<CaseClause*>* clauses = stmt->cases(); 759 ZoneList<CaseClause*>* clauses = stmt->cases();
703 CaseClause* default_clause = NULL; // Can occur anywhere in the list. 760 CaseClause* default_clause = NULL; // Can occur anywhere in the list.
704 761
705 Label next_test; // Recycled for each test. 762 Label next_test; // Recycled for each test.
706 // Compile all the tests with branches to their bodies. 763 // Compile all the tests with branches to their bodies.
707 for (int i = 0; i < clauses->length(); i++) { 764 for (int i = 0; i < clauses->length(); i++) {
708 CaseClause* clause = clauses->at(i); 765 CaseClause* clause = clauses->at(i);
709 // The default is not a test, but remember it as final fall through. 766 // The default is not a test, but remember it as final fall through.
710 if (clause->is_default()) { 767 if (clause->is_default()) {
711 default_clause = clause; 768 default_clause = clause;
712 continue; 769 continue;
713 } 770 }
714 771
715 Comment cmnt(masm_, "[ Case comparison"); 772 Comment cmnt(masm_, "[ Case comparison");
716 __ bind(&next_test); 773 __ bind(&next_test);
717 next_test.Unuse(); 774 next_test.Unuse();
718 775
719 // Compile the label expression. 776 // Compile the label expression.
720 VisitForAccumulatorValue(clause->label()); 777 VisitForAccumulatorValue(clause->label());
721 778
722 // Perform the comparison as if via '==='. 779 // Perform the comparison as if via '==='.
723 __ mov(edx, Operand(esp, 0)); // Switch value. 780 __ mov(edx, Operand(esp, 0)); // Switch value.
724 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT); 781 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
782 JumpPatchSite patch_site(masm_);
725 if (inline_smi_code) { 783 if (inline_smi_code) {
726 NearLabel slow_case; 784 NearLabel slow_case;
727 __ mov(ecx, edx); 785 __ mov(ecx, edx);
728 __ or_(ecx, Operand(eax)); 786 __ or_(ecx, Operand(eax));
729 __ test(ecx, Immediate(kSmiTagMask)); 787 patch_site.EmitJumpIfNotSmi(ecx, &slow_case);
730 __ j(not_zero, &slow_case, not_taken); 788
731 __ cmp(edx, Operand(eax)); 789 __ cmp(edx, Operand(eax));
732 __ j(not_equal, &next_test); 790 __ j(not_equal, &next_test);
733 __ Drop(1); // Switch value is no longer needed. 791 __ Drop(1); // Switch value is no longer needed.
734 __ jmp(clause->body_target()->entry_label()); 792 __ jmp(clause->body_target()->entry_label());
735 __ bind(&slow_case); 793 __ bind(&slow_case);
736 } 794 }
737 795
738 // Record position before stub call for type feedback. 796 // Record position before stub call for type feedback.
739 SetSourcePosition(clause->position()); 797 SetSourcePosition(clause->position());
740
741 Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT); 798 Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
742 __ call(ic, RelocInfo::CODE_TARGET); 799 EmitCallIC(ic, &patch_site);
743 800
744 __ test(eax, Operand(eax)); 801 __ test(eax, Operand(eax));
745 __ j(not_equal, &next_test); 802 __ j(not_equal, &next_test);
746 __ Drop(1); // Switch value is no longer needed. 803 __ Drop(1); // Switch value is no longer needed.
747 __ jmp(clause->body_target()->entry_label()); 804 __ jmp(clause->body_target()->entry_label());
748 } 805 }
749 806
750 // Discard the test value and jump to the default if present, otherwise to 807 // Discard the test value and jump to the default if present, otherwise to
751 // the end of the statement. 808 // the end of the statement.
752 __ bind(&next_test); 809 __ bind(&next_test);
(...skipping 805 matching lines...)
1558 SetSourcePosition(prop->position()); 1615 SetSourcePosition(prop->position());
1559 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); 1616 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
1560 EmitCallIC(ic, RelocInfo::CODE_TARGET); 1617 EmitCallIC(ic, RelocInfo::CODE_TARGET);
1561 } 1618 }
1562 1619
1563 1620
1564 void FullCodeGenerator::EmitConstantSmiAdd(Expression* expr, 1621 void FullCodeGenerator::EmitConstantSmiAdd(Expression* expr,
1565 OverwriteMode mode, 1622 OverwriteMode mode,
1566 bool left_is_constant_smi, 1623 bool left_is_constant_smi,
1567 Smi* value) { 1624 Smi* value) {
1568 NearLabel call_stub; 1625 NearLabel call_stub, done;
1569 Label done;
1570 __ add(Operand(eax), Immediate(value)); 1626 __ add(Operand(eax), Immediate(value));
1571 __ j(overflow, &call_stub); 1627 __ j(overflow, &call_stub);
1572 __ test(eax, Immediate(kSmiTagMask)); 1628 JumpPatchSite patch_site(masm_);
1573 __ j(zero, &done); 1629 patch_site.EmitJumpIfSmi(eax, &done);
1574 1630
1575 // Undo the optimistic add operation and call the shared stub. 1631 // Undo the optimistic add operation and call the shared stub.
1576 __ bind(&call_stub); 1632 __ bind(&call_stub);
1577 __ sub(Operand(eax), Immediate(value)); 1633 __ sub(Operand(eax), Immediate(value));
1578 Token::Value op = Token::ADD; 1634 Token::Value op = Token::ADD;
1579 TypeRecordingBinaryOpStub stub(op, mode); 1635 TypeRecordingBinaryOpStub stub(op, mode);
1580 if (left_is_constant_smi) { 1636 if (left_is_constant_smi) {
1581 __ mov(edx, Immediate(value)); 1637 __ mov(edx, Immediate(value));
1582 } else { 1638 } else {
1583 __ mov(edx, eax); 1639 __ mov(edx, eax);
1584 __ mov(eax, Immediate(value)); 1640 __ mov(eax, Immediate(value));
1585 } 1641 }
1586 __ CallStub(&stub); 1642 EmitCallIC(stub.GetCode(), &patch_site);
1643
1587 __ bind(&done); 1644 __ bind(&done);
1588 context()->Plug(eax); 1645 context()->Plug(eax);
1589 } 1646 }
1590 1647
1591 1648
1592 void FullCodeGenerator::EmitConstantSmiSub(Expression* expr, 1649 void FullCodeGenerator::EmitConstantSmiSub(Expression* expr,
1593 OverwriteMode mode, 1650 OverwriteMode mode,
1594 bool left_is_constant_smi, 1651 bool left_is_constant_smi,
1595 Smi* value) { 1652 Smi* value) {
1596 Label call_stub, done; 1653 NearLabel call_stub, done;
1597 if (left_is_constant_smi) { 1654 if (left_is_constant_smi) {
1598 __ mov(ecx, eax); 1655 __ mov(ecx, eax);
1599 __ mov(eax, Immediate(value)); 1656 __ mov(eax, Immediate(value));
1600 __ sub(Operand(eax), ecx); 1657 __ sub(Operand(eax), ecx);
1601 } else { 1658 } else {
1602 __ sub(Operand(eax), Immediate(value)); 1659 __ sub(Operand(eax), Immediate(value));
1603 } 1660 }
1604 __ j(overflow, &call_stub); 1661 __ j(overflow, &call_stub);
1605 __ test(eax, Immediate(kSmiTagMask)); 1662 JumpPatchSite patch_site(masm_);
1606 __ j(zero, &done); 1663 patch_site.EmitJumpIfSmi(eax, &done);
1607 1664
1608 __ bind(&call_stub); 1665 __ bind(&call_stub);
1609 if (left_is_constant_smi) { 1666 if (left_is_constant_smi) {
1610 __ mov(edx, Immediate(value)); 1667 __ mov(edx, Immediate(value));
1611 __ mov(eax, ecx); 1668 __ mov(eax, ecx);
1612 } else { 1669 } else {
1613 __ add(Operand(eax), Immediate(value)); // Undo the subtraction. 1670 __ add(Operand(eax), Immediate(value)); // Undo the subtraction.
1614 __ mov(edx, eax); 1671 __ mov(edx, eax);
1615 __ mov(eax, Immediate(value)); 1672 __ mov(eax, Immediate(value));
1616 } 1673 }
1617 Token::Value op = Token::SUB; 1674 Token::Value op = Token::SUB;
1618 TypeRecordingBinaryOpStub stub(op, mode); 1675 TypeRecordingBinaryOpStub stub(op, mode);
1619 __ CallStub(&stub); 1676 EmitCallIC(stub.GetCode(), &patch_site);
1677
1620 __ bind(&done); 1678 __ bind(&done);
1621 context()->Plug(eax); 1679 context()->Plug(eax);
1622 } 1680 }
1623 1681
1624 1682
1625 void FullCodeGenerator::EmitConstantSmiShiftOp(Expression* expr, 1683 void FullCodeGenerator::EmitConstantSmiShiftOp(Expression* expr,
1626 Token::Value op, 1684 Token::Value op,
1627 OverwriteMode mode, 1685 OverwriteMode mode,
1628 Smi* value) { 1686 Smi* value) {
1629 Label call_stub, smi_case, done; 1687 NearLabel call_stub, smi_case, done;
1630 int shift_value = value->value() & 0x1f; 1688 int shift_value = value->value() & 0x1f;
1631 1689
1632 __ test(eax, Immediate(kSmiTagMask)); 1690 JumpPatchSite patch_site(masm_);
1633 __ j(zero, &smi_case); 1691 patch_site.EmitJumpIfSmi(eax, &smi_case);
1634 1692
1693 // Call stub.
1635 __ bind(&call_stub); 1694 __ bind(&call_stub);
1636 __ mov(edx, eax); 1695 __ mov(edx, eax);
1637 __ mov(eax, Immediate(value)); 1696 __ mov(eax, Immediate(value));
1638 TypeRecordingBinaryOpStub stub(op, mode); 1697 TypeRecordingBinaryOpStub stub(op, mode);
1639 __ CallStub(&stub); 1698 EmitCallIC(stub.GetCode(), &patch_site);
1640 __ jmp(&done); 1699 __ jmp(&done);
1641 1700
1701 // Smi case.
1642 __ bind(&smi_case); 1702 __ bind(&smi_case);
1643 switch (op) { 1703 switch (op) {
1644 case Token::SHL: 1704 case Token::SHL:
1645 if (shift_value != 0) { 1705 if (shift_value != 0) {
1646 __ mov(edx, eax); 1706 __ mov(edx, eax);
1647 if (shift_value > 1) { 1707 if (shift_value > 1) {
1648 __ shl(edx, shift_value - 1); 1708 __ shl(edx, shift_value - 1);
1649 } 1709 }
1650 // Convert int result to smi, checking that it is in int range. 1710 // Convert int result to smi, checking that it is in int range.
1651 ASSERT(kSmiTagSize == 1); // Adjust code if not the case. 1711 ASSERT(kSmiTagSize == 1); // Adjust code if not the case.
(...skipping 29 matching lines...)
1681 1741
1682 __ bind(&done); 1742 __ bind(&done);
1683 context()->Plug(eax); 1743 context()->Plug(eax);
1684 } 1744 }
1685 1745
1686 1746
1687 void FullCodeGenerator::EmitConstantSmiBitOp(Expression* expr, 1747 void FullCodeGenerator::EmitConstantSmiBitOp(Expression* expr,
1688 Token::Value op, 1748 Token::Value op,
1689 OverwriteMode mode, 1749 OverwriteMode mode,
1690 Smi* value) { 1750 Smi* value) {
1691 Label smi_case, done; 1751 NearLabel smi_case, done;
1692 __ test(eax, Immediate(kSmiTagMask)); 1752
1693 __ j(zero, &smi_case); 1753 JumpPatchSite patch_site(masm_);
1754 patch_site.EmitJumpIfSmi(eax, &smi_case);
1694 1755
1695 // The order of the arguments does not matter for bit-ops with a 1756 // The order of the arguments does not matter for bit-ops with a
1696 // constant operand. 1757 // constant operand.
1697 __ mov(edx, Immediate(value)); 1758 __ mov(edx, Immediate(value));
1698 TypeRecordingBinaryOpStub stub(op, mode); 1759 TypeRecordingBinaryOpStub stub(op, mode);
1699 __ CallStub(&stub); 1760 EmitCallIC(stub.GetCode(), &patch_site);
1700 __ jmp(&done); 1761 __ jmp(&done);
1701 1762
1763 // Smi case.
1702 __ bind(&smi_case); 1764 __ bind(&smi_case);
1703 switch (op) { 1765 switch (op) {
1704 case Token::BIT_OR: 1766 case Token::BIT_OR:
1705 __ or_(Operand(eax), Immediate(value)); 1767 __ or_(Operand(eax), Immediate(value));
1706 break; 1768 break;
1707 case Token::BIT_XOR: 1769 case Token::BIT_XOR:
1708 __ xor_(Operand(eax), Immediate(value)); 1770 __ xor_(Operand(eax), Immediate(value));
1709 break; 1771 break;
1710 case Token::BIT_AND: 1772 case Token::BIT_AND:
1711 __ and_(Operand(eax), Immediate(value)); 1773 __ and_(Operand(eax), Immediate(value));
(...skipping 47 matching lines...)
1759 EmitConstantSmiBinaryOp(expr, op, mode, false, value); 1821 EmitConstantSmiBinaryOp(expr, op, mode, false, value);
1760 return; 1822 return;
1761 } else if (constant == kLeftConstant) { 1823 } else if (constant == kLeftConstant) {
1762 Smi* value = Smi::cast(*left->AsLiteral()->handle()); 1824 Smi* value = Smi::cast(*left->AsLiteral()->handle());
1763 EmitConstantSmiBinaryOp(expr, op, mode, true, value); 1825 EmitConstantSmiBinaryOp(expr, op, mode, true, value);
1764 return; 1826 return;
1765 } 1827 }
1766 1828
1767 // Do combined smi check of the operands. Left operand is on the 1829 // Do combined smi check of the operands. Left operand is on the
1768 // stack. Right operand is in eax. 1830 // stack. Right operand is in eax.
1769 Label done, stub_call, smi_case; 1831 NearLabel done, smi_case, stub_call;
1770 __ pop(edx); 1832 __ pop(edx);
1771 __ mov(ecx, eax); 1833 __ mov(ecx, eax);
1772 __ or_(eax, Operand(edx)); 1834 __ or_(eax, Operand(edx));
1773 __ test(eax, Immediate(kSmiTagMask)); 1835 JumpPatchSite patch_site(masm_);
1774 __ j(zero, &smi_case); 1836 patch_site.EmitJumpIfSmi(eax, &smi_case);
1775 1837
1776 __ bind(&stub_call); 1838 __ bind(&stub_call);
1777 __ mov(eax, ecx); 1839 __ mov(eax, ecx);
1778 TypeRecordingBinaryOpStub stub(op, mode); 1840 TypeRecordingBinaryOpStub stub(op, mode);
1779 __ CallStub(&stub); 1841 EmitCallIC(stub.GetCode(), &patch_site);
1780 __ jmp(&done); 1842 __ jmp(&done);
1781 1843
1844 // Smi case.
1782 __ bind(&smi_case); 1845 __ bind(&smi_case);
1783 __ mov(eax, edx); // Copy left operand in case of a stub call. 1846 __ mov(eax, edx); // Copy left operand in case of a stub call.
1784 1847
1785 switch (op) { 1848 switch (op) {
1786 case Token::SAR: 1849 case Token::SAR:
1787 __ SmiUntag(eax); 1850 __ SmiUntag(eax);
1788 __ SmiUntag(ecx); 1851 __ SmiUntag(ecx);
1789 __ sar_cl(eax); // No checks of result necessary 1852 __ sar_cl(eax); // No checks of result necessary
1790 __ SmiTag(eax); 1853 __ SmiTag(eax);
1791 break; 1854 break;
(...skipping 58 matching lines...)
1850 1913
1851 __ bind(&done); 1914 __ bind(&done);
1852 context()->Plug(eax); 1915 context()->Plug(eax);
1853 } 1916 }
1854 1917
1855 1918
1856 void FullCodeGenerator::EmitBinaryOp(Token::Value op, 1919 void FullCodeGenerator::EmitBinaryOp(Token::Value op,
1857 OverwriteMode mode) { 1920 OverwriteMode mode) {
1858 __ pop(edx); 1921 __ pop(edx);
1859 TypeRecordingBinaryOpStub stub(op, mode); 1922 TypeRecordingBinaryOpStub stub(op, mode);
1860 __ CallStub(&stub); 1923 EmitCallIC(stub.GetCode(), NULL); // NULL signals no inlined smi code.
1861 context()->Plug(eax); 1924 context()->Plug(eax);
1862 } 1925 }
1863 1926
1864 1927
1865 void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) { 1928 void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
1866 // Invalid left-hand sides are rewritten to have a 'throw 1929 // Invalid left-hand sides are rewritten to have a 'throw
1867 // ReferenceError' on the left-hand side. 1930 // ReferenceError' on the left-hand side.
1868 if (!expr->IsValidLeftHandSide()) { 1931 if (!expr->IsValidLeftHandSide()) {
1869 VisitForEffect(expr); 1932 VisitForEffect(expr);
1870 return; 1933 return;
(...skipping 1141 matching lines...)
3012 VisitForStackValue(args->at(1)); 3075 VisitForStackValue(args->at(1));
3013 3076
3014 StringCompareStub stub; 3077 StringCompareStub stub;
3015 __ CallStub(&stub); 3078 __ CallStub(&stub);
3016 context()->Plug(eax); 3079 context()->Plug(eax);
3017 } 3080 }
3018 3081
3019 3082
3020 void FullCodeGenerator::EmitMathSin(ZoneList<Expression*>* args) { 3083 void FullCodeGenerator::EmitMathSin(ZoneList<Expression*>* args) {
3021 // Load the argument on the stack and call the stub. 3084 // Load the argument on the stack and call the stub.
3022 TranscendentalCacheStub stub(TranscendentalCache::SIN); 3085 TranscendentalCacheStub stub(TranscendentalCache::SIN,
3086 TranscendentalCacheStub::TAGGED);
3023 ASSERT(args->length() == 1); 3087 ASSERT(args->length() == 1);
3024 VisitForStackValue(args->at(0)); 3088 VisitForStackValue(args->at(0));
3025 __ CallStub(&stub); 3089 __ CallStub(&stub);
3026 context()->Plug(eax); 3090 context()->Plug(eax);
3027 } 3091 }
3028 3092
3029 3093
3030 void FullCodeGenerator::EmitMathCos(ZoneList<Expression*>* args) { 3094 void FullCodeGenerator::EmitMathCos(ZoneList<Expression*>* args) {
3031 // Load the argument on the stack and call the stub. 3095 // Load the argument on the stack and call the stub.
3032 TranscendentalCacheStub stub(TranscendentalCache::COS); 3096 TranscendentalCacheStub stub(TranscendentalCache::COS,
3097 TranscendentalCacheStub::TAGGED);
3033 ASSERT(args->length() == 1); 3098 ASSERT(args->length() == 1);
3034 VisitForStackValue(args->at(0)); 3099 VisitForStackValue(args->at(0));
3035 __ CallStub(&stub); 3100 __ CallStub(&stub);
3036 context()->Plug(eax); 3101 context()->Plug(eax);
3037 } 3102 }
3038 3103
3039 3104
3040 void FullCodeGenerator::EmitMathLog(ZoneList<Expression*>* args) { 3105 void FullCodeGenerator::EmitMathLog(ZoneList<Expression*>* args) {
3041 // Load the argument on the stack and call the stub. 3106 // Load the argument on the stack and call the stub.
3042 TranscendentalCacheStub stub(TranscendentalCache::LOG); 3107 TranscendentalCacheStub stub(TranscendentalCache::LOG,
3108 TranscendentalCacheStub::TAGGED);
3043 ASSERT(args->length() == 1); 3109 ASSERT(args->length() == 1);
3044 VisitForStackValue(args->at(0)); 3110 VisitForStackValue(args->at(0));
3045 __ CallStub(&stub); 3111 __ CallStub(&stub);
3046 context()->Plug(eax); 3112 context()->Plug(eax);
3047 } 3113 }
3048 3114
3049 3115
3050 void FullCodeGenerator::EmitMathSqrt(ZoneList<Expression*>* args) { 3116 void FullCodeGenerator::EmitMathSqrt(ZoneList<Expression*>* args) {
3051 // Load the argument on the stack and call the runtime function. 3117 // Load the argument on the stack and call the runtime function.
3052 ASSERT(args->length() == 1); 3118 ASSERT(args->length() == 1);
(...skipping 63 matching lines...)
3116 __ j(not_equal, &slow_case); 3182 __ j(not_equal, &slow_case);
3117 3183
3118 // Check that both indices are smis. 3184 // Check that both indices are smis.
3119 __ mov(index_1, Operand(esp, 1 * kPointerSize)); 3185 __ mov(index_1, Operand(esp, 1 * kPointerSize));
3120 __ mov(index_2, Operand(esp, 0)); 3186 __ mov(index_2, Operand(esp, 0));
3121 __ mov(temp, index_1); 3187 __ mov(temp, index_1);
3122 __ or_(temp, Operand(index_2)); 3188 __ or_(temp, Operand(index_2));
3123 __ test(temp, Immediate(kSmiTagMask)); 3189 __ test(temp, Immediate(kSmiTagMask));
3124 __ j(not_zero, &slow_case); 3190 __ j(not_zero, &slow_case);
3125 3191
3192 // Check that both indices are valid.
3193 __ mov(temp, FieldOperand(object, JSArray::kLengthOffset));
3194 __ cmp(temp, Operand(index_1));
3195 __ j(below_equal, &slow_case);
3196 __ cmp(temp, Operand(index_2));
3197 __ j(below_equal, &slow_case);
3198
3126 // Bring addresses into index1 and index2. 3199 // Bring addresses into index1 and index2.
3127 __ lea(index_1, CodeGenerator::FixedArrayElementOperand(elements, index_1)); 3200 __ lea(index_1, CodeGenerator::FixedArrayElementOperand(elements, index_1));
3128 __ lea(index_2, CodeGenerator::FixedArrayElementOperand(elements, index_2)); 3201 __ lea(index_2, CodeGenerator::FixedArrayElementOperand(elements, index_2));
3129 3202
3130 // Swap elements. Use object and temp as scratch registers. 3203 // Swap elements. Use object and temp as scratch registers.
3131 __ mov(object, Operand(index_1, 0)); 3204 __ mov(object, Operand(index_1, 0));
3132 __ mov(temp, Operand(index_2, 0)); 3205 __ mov(temp, Operand(index_2, 0));
3133 __ mov(Operand(index_2, 0), object); 3206 __ mov(Operand(index_2, 0), object);
3134 __ mov(Operand(index_1, 0), temp); 3207 __ mov(Operand(index_1, 0), temp);
3135 3208
(...skipping 588 matching lines...)
3724 __ mov(Operand(esp, kPointerSize), eax); 3797 __ mov(Operand(esp, kPointerSize), eax);
3725 break; 3798 break;
3726 case KEYED_PROPERTY: 3799 case KEYED_PROPERTY:
3727 __ mov(Operand(esp, 2 * kPointerSize), eax); 3800 __ mov(Operand(esp, 2 * kPointerSize), eax);
3728 break; 3801 break;
3729 } 3802 }
3730 } 3803 }
3731 } 3804 }
3732 3805
3733 // Inline smi case if we are in a loop. 3806 // Inline smi case if we are in a loop.
3734 NearLabel stub_call; 3807 NearLabel stub_call, done;
3735 Label done; 3808 JumpPatchSite patch_site(masm_);
3809
3736 if (ShouldInlineSmiCase(expr->op())) { 3810 if (ShouldInlineSmiCase(expr->op())) {
3737 if (expr->op() == Token::INC) { 3811 if (expr->op() == Token::INC) {
3738 __ add(Operand(eax), Immediate(Smi::FromInt(1))); 3812 __ add(Operand(eax), Immediate(Smi::FromInt(1)));
3739 } else { 3813 } else {
3740 __ sub(Operand(eax), Immediate(Smi::FromInt(1))); 3814 __ sub(Operand(eax), Immediate(Smi::FromInt(1)));
3741 } 3815 }
3742 __ j(overflow, &stub_call); 3816 __ j(overflow, &stub_call);
3743 // We could eliminate this smi check if we split the code at 3817 // We could eliminate this smi check if we split the code at
3744 // the first smi check before calling ToNumber. 3818 // the first smi check before calling ToNumber.
3745 __ test(eax, Immediate(kSmiTagMask)); 3819 patch_site.EmitJumpIfSmi(eax, &done);
3746 __ j(zero, &done); 3820
3747 __ bind(&stub_call); 3821 __ bind(&stub_call);
3748 // Call stub. Undo operation first. 3822 // Call stub. Undo operation first.
3749 if (expr->op() == Token::INC) { 3823 if (expr->op() == Token::INC) {
3750 __ sub(Operand(eax), Immediate(Smi::FromInt(1))); 3824 __ sub(Operand(eax), Immediate(Smi::FromInt(1)));
3751 } else { 3825 } else {
3752 __ add(Operand(eax), Immediate(Smi::FromInt(1))); 3826 __ add(Operand(eax), Immediate(Smi::FromInt(1)));
3753 } 3827 }
3754 } 3828 }
3755 3829
3756 // Record position before stub call. 3830 // Record position before stub call.
3757 SetSourcePosition(expr->position()); 3831 SetSourcePosition(expr->position());
3758 3832
3759 // Call stub for +1/-1. 3833 // Call stub for +1/-1.
3760 __ mov(edx, eax); 3834 __ mov(edx, eax);
3761 __ mov(eax, Immediate(Smi::FromInt(1))); 3835 __ mov(eax, Immediate(Smi::FromInt(1)));
3762 TypeRecordingBinaryOpStub stub(expr->binary_op(), 3836 TypeRecordingBinaryOpStub stub(expr->binary_op(),
3763 NO_OVERWRITE); 3837 NO_OVERWRITE);
3764 __ CallStub(&stub); 3838 EmitCallIC(stub.GetCode(), &patch_site);
3765 __ bind(&done); 3839 __ bind(&done);
3766 3840
3767 // Store the value returned in eax. 3841 // Store the value returned in eax.
3768 switch (assign_type) { 3842 switch (assign_type) {
3769 case VARIABLE: 3843 case VARIABLE:
3770 if (expr->is_postfix()) { 3844 if (expr->is_postfix()) {
3771 // Perform the assignment as if via '='. 3845 // Perform the assignment as if via '='.
3772 { EffectContext context(this); 3846 { EffectContext context(this);
3773 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), 3847 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(),
3774 Token::ASSIGN); 3848 Token::ASSIGN);
(...skipping 204 matching lines...)
3979 case Token::IN: 4053 case Token::IN:
3980 VisitForStackValue(expr->right()); 4054 VisitForStackValue(expr->right());
3981 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION); 4055 __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
3982 PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL); 4056 PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
3983 __ cmp(eax, Factory::true_value()); 4057 __ cmp(eax, Factory::true_value());
3984 Split(equal, if_true, if_false, fall_through); 4058 Split(equal, if_true, if_false, fall_through);
3985 break; 4059 break;
3986 4060
3987 case Token::INSTANCEOF: { 4061 case Token::INSTANCEOF: {
3988 VisitForStackValue(expr->right()); 4062 VisitForStackValue(expr->right());
3989 InstanceofStub stub; 4063 __ IncrementCounter(&Counters::instance_of_full, 1);
4064 InstanceofStub stub(InstanceofStub::kNoFlags);
3990 __ CallStub(&stub); 4065 __ CallStub(&stub);
3991 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); 4066 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
3992 __ test(eax, Operand(eax)); 4067 __ test(eax, Operand(eax));
3993 // The stub returns 0 for true. 4068 // The stub returns 0 for true.
3994 Split(zero, if_true, if_false, fall_through); 4069 Split(zero, if_true, if_false, fall_through);
3995 break; 4070 break;
3996 } 4071 }
3997 4072
3998 default: { 4073 default: {
3999 VisitForAccumulatorValue(expr->right()); 4074 VisitForAccumulatorValue(expr->right());
(...skipping 27 matching lines...)
4027 cc = greater_equal; 4102 cc = greater_equal;
4028 __ pop(edx); 4103 __ pop(edx);
4029 break; 4104 break;
4030 case Token::IN: 4105 case Token::IN:
4031 case Token::INSTANCEOF: 4106 case Token::INSTANCEOF:
4032 default: 4107 default:
4033 UNREACHABLE(); 4108 UNREACHABLE();
4034 } 4109 }
4035 4110
4036 bool inline_smi_code = ShouldInlineSmiCase(op); 4111 bool inline_smi_code = ShouldInlineSmiCase(op);
4112 JumpPatchSite patch_site(masm_);
4037 if (inline_smi_code) { 4113 if (inline_smi_code) {
4038 NearLabel slow_case; 4114 NearLabel slow_case;
4039 __ mov(ecx, Operand(edx)); 4115 __ mov(ecx, Operand(edx));
4040 __ or_(ecx, Operand(eax)); 4116 __ or_(ecx, Operand(eax));
4041 __ test(ecx, Immediate(kSmiTagMask)); 4117 patch_site.EmitJumpIfNotSmi(ecx, &slow_case);
4042 __ j(not_zero, &slow_case, not_taken);
4043 __ cmp(edx, Operand(eax)); 4118 __ cmp(edx, Operand(eax));
4044 Split(cc, if_true, if_false, NULL); 4119 Split(cc, if_true, if_false, NULL);
4045 __ bind(&slow_case); 4120 __ bind(&slow_case);
4046 } 4121 }
4047 4122
4048 // Record position and call the compare IC. 4123 // Record position and call the compare IC.
4124 SetSourcePosition(expr->position());
4049 Handle<Code> ic = CompareIC::GetUninitialized(op); 4125 Handle<Code> ic = CompareIC::GetUninitialized(op);
4050 SetSourcePosition(expr->position()); 4126 EmitCallIC(ic, &patch_site);
4051 __ call(ic, RelocInfo::CODE_TARGET); 4127
4052 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); 4128 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
4053 __ test(eax, Operand(eax)); 4129 __ test(eax, Operand(eax));
4054 Split(cc, if_true, if_false, fall_through); 4130 Split(cc, if_true, if_false, fall_through);
4055 } 4131 }
4056 } 4132 }
4057 4133
4058 // Convert the result of the comparison into one expected for this 4134 // Convert the result of the comparison into one expected for this
4059 // expression's context. 4135 // expression's context.
4060 context()->Plug(if_true, if_false); 4136 context()->Plug(if_true, if_false);
4061 } 4137 }
(...skipping 83 matching lines...)
4145 case Code::KEYED_STORE_IC: 4221 case Code::KEYED_STORE_IC:
4146 __ nop(); // Signals no inlined code. 4222 __ nop(); // Signals no inlined code.
4147 break; 4223 break;
4148 default: 4224 default:
4149 // Do nothing. 4225 // Do nothing.
4150 break; 4226 break;
4151 } 4227 }
4152 } 4228 }
4153 4229
4154 4230
4231 void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
4232 __ call(ic, RelocInfo::CODE_TARGET);
4233 if (patch_site != NULL && patch_site->is_bound()) {
4234 patch_site->EmitPatchInfo();
4235 } else {
4236 __ nop(); // Signals no inlined code.
4237 }
4238 }
4239
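Note (explanatory, not part of the patch): the new EmitCallIC overload above gives every IC call site a one-byte marker the runtime can inspect, assuming the usual ia32 encodings. A bound JumpPatchSite leaves "test eax, <delta>" (opcode 0xA9) right after the call, while sites with no inlined smi code leave a single nop (0x90), matching the "Signals no inlined code" nops emitted by the other EmitCallIC overload. A hypothetical helper that makes the distinction explicit:

    // Hypothetical sketch, not code from this CL: classify the byte that
    // EmitCallIC leaves directly after an IC call.
    static bool HasInlinedSmiCode(uint8_t* address_after_call) {
      // 0xA9 = "test eax, imm32" from EmitPatchInfo(); 0x90 = nop marker.
      return *address_after_call == 0xA9;
    }
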
4240
4155 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { 4241 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
4156 ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset); 4242 ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
4157 __ mov(Operand(ebp, frame_offset), value); 4243 __ mov(Operand(ebp, frame_offset), value);
4158 } 4244 }
4159 4245
4160 4246
4161 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { 4247 void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
4162 __ mov(dst, ContextOperand(esi, context_index)); 4248 __ mov(dst, ContextOperand(esi, context_index));
4163 } 4249 }
4164 4250
(...skipping 27 matching lines...)
4192 // And return. 4278 // And return.
4193 __ ret(0); 4279 __ ret(0);
4194 } 4280 }
4195 4281
4196 4282
4197 #undef __ 4283 #undef __
4198 4284
4199 } } // namespace v8::internal 4285 } } // namespace v8::internal
4200 4286
4201 #endif // V8_TARGET_ARCH_IA32 4287 #endif // V8_TARGET_ARCH_IA32
