OLD | NEW |
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 23 matching lines...) |
34 #include "compiler.h" | 34 #include "compiler.h" |
35 #include "debug.h" | 35 #include "debug.h" |
36 #include "full-codegen.h" | 36 #include "full-codegen.h" |
37 #include "parser.h" | 37 #include "parser.h" |
38 #include "scopes.h" | 38 #include "scopes.h" |
39 #include "stub-cache.h" | 39 #include "stub-cache.h" |
40 | 40 |
41 namespace v8 { | 41 namespace v8 { |
42 namespace internal { | 42 namespace internal { |
43 | 43 |
| 44 |
44 #define __ ACCESS_MASM(masm_) | 45 #define __ ACCESS_MASM(masm_) |
45 | 46 |
| 47 |
| 48 class JumpPatchSite BASE_EMBEDDED { |
| 49 public: |
| 50 explicit JumpPatchSite(MacroAssembler* masm) |
| 51 : masm_(masm) { |
| 52 #ifdef DEBUG |
| 53 info_emitted_ = false; |
| 54 #endif |
| 55 } |
| 56 |
| 57 ~JumpPatchSite() { |
| 58 ASSERT(patch_site_.is_bound() == info_emitted_); |
| 59 } |
| 60 |
| 61 void EmitJumpIfNotSmi(Register reg, NearLabel* target) { |
| 62 __ test(reg, Immediate(kSmiTagMask)); |
| 63 EmitJump(not_carry, target); // Always taken before patched. |
| 64 } |
| 65 |
| 66 void EmitJumpIfSmi(Register reg, NearLabel* target) { |
| 67 __ test(reg, Immediate(kSmiTagMask)); |
| 68 EmitJump(carry, target); // Never taken before patched. |
| 69 } |
| 70 |
| 71 void EmitPatchInfo() { |
| 72 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_); |
| 73 ASSERT(is_int8(delta_to_patch_site)); |
| 74 __ test(eax, Immediate(delta_to_patch_site)); |
| 75 #ifdef DEBUG |
| 76 info_emitted_ = true; |
| 77 #endif |
| 78 } |
| 79 |
| 80 bool is_bound() const { return patch_site_.is_bound(); } |
| 81 |
| 82 private: |
| 83 // jc will be patched with jz, jnc will become jnz. |
| 84 void EmitJump(Condition cc, NearLabel* target) { |
| 85 ASSERT(!patch_site_.is_bound() && !info_emitted_); |
| 86 ASSERT(cc == carry || cc == not_carry); |
| 87 __ bind(&patch_site_); |
| 88 __ j(cc, target); |
| 89 } |
| 90 |
| 91 MacroAssembler* masm_; |
| 92 Label patch_site_; |
| 93 #ifdef DEBUG |
| 94 bool info_emitted_; |
| 95 #endif |
| 96 }; |
| 97 |
| 98 |
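The patch-site mechanism above leans on an x86 detail: test reg, kSmiTagMask always clears the carry flag and sets the zero flag from the tag bit. So the jc emitted by EmitJumpIfSmi is never taken and the jnc emitted by EmitJumpIfNotSmi is always taken, meaning freshly compiled code always takes the generic stub/IC path. Once the IC has observed smi operands it can flip the single condition byte so the jump keys off the zero flag instead, enabling the inlined smi fast path. A minimal, hypothetical C++ sketch of that one-byte rewrite (not the actual V8 patcher; 0x72/0x73/0x74/0x75 are the standard ia32 short-jump encodings for jc/jnc/jz/jnz):

#include <cassert>
#include <cstdint>

// Flip a short carry-conditioned jump into the matching zero-flag jump:
// jc (0x72) becomes jz (0x74), jnc (0x73) becomes jnz (0x75).
inline void FlipSmiGuardJump(uint8_t* jcc_opcode) {
  assert(*jcc_opcode == 0x72 || *jcc_opcode == 0x73);
  *jcc_opcode += 2;  // 0x72 -> 0x74, 0x73 -> 0x75.
}

After the flip, EmitJumpIfSmi's jump is taken exactly when the tag bit is clear (a smi) and EmitJumpIfNotSmi's exactly when it is set, which is what the inlined smi cases below depend on.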
46 // Generate code for a JS function. On entry to the function the receiver | 99 // Generate code for a JS function. On entry to the function the receiver |
47 // and arguments have been pushed on the stack left to right, with the | 100 // and arguments have been pushed on the stack left to right, with the |
48 // return address on top of them. The actual argument count matches the | 101 // return address on top of them. The actual argument count matches the |
49 // formal parameter count expected by the function. | 102 // formal parameter count expected by the function. |
50 // | 103 // |
51 // The live registers are: | 104 // The live registers are: |
52 // o edi: the JS function object being called (ie, ourselves) | 105 // o edi: the JS function object being called (ie, ourselves) |
53 // o esi: our context | 106 // o esi: our context |
54 // o ebp: our caller's frame pointer | 107 // o ebp: our caller's frame pointer |
55 // o esp: stack pointer (pointing to return address) | 108 // o esp: stack pointer (pointing to return address) |
(...skipping 652 matching lines...) |
708 Comment cmnt(masm_, "[ Case comparison"); | 761 Comment cmnt(masm_, "[ Case comparison"); |
709 __ bind(&next_test); | 762 __ bind(&next_test); |
710 next_test.Unuse(); | 763 next_test.Unuse(); |
711 | 764 |
712 // Compile the label expression. | 765 // Compile the label expression. |
713 VisitForAccumulatorValue(clause->label()); | 766 VisitForAccumulatorValue(clause->label()); |
714 | 767 |
715 // Perform the comparison as if via '==='. | 768 // Perform the comparison as if via '==='. |
716 __ mov(edx, Operand(esp, 0)); // Switch value. | 769 __ mov(edx, Operand(esp, 0)); // Switch value. |
717 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT); | 770 bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT); |
| 771 JumpPatchSite patch_site(masm_); |
718 if (inline_smi_code) { | 772 if (inline_smi_code) { |
719 NearLabel slow_case; | 773 NearLabel slow_case; |
720 __ mov(ecx, edx); | 774 __ mov(ecx, edx); |
721 __ or_(ecx, Operand(eax)); | 775 __ or_(ecx, Operand(eax)); |
722 __ test(ecx, Immediate(kSmiTagMask)); | 776 patch_site.EmitJumpIfNotSmi(ecx, &slow_case); |
723 __ j(not_zero, &slow_case, not_taken); | 777 |
724 __ cmp(edx, Operand(eax)); | 778 __ cmp(edx, Operand(eax)); |
725 __ j(not_equal, &next_test); | 779 __ j(not_equal, &next_test); |
726 __ Drop(1); // Switch value is no longer needed. | 780 __ Drop(1); // Switch value is no longer needed. |
727 __ jmp(clause->body_target()->entry_label()); | 781 __ jmp(clause->body_target()->entry_label()); |
728 __ bind(&slow_case); | 782 __ bind(&slow_case); |
729 } | 783 } |
730 | 784 |
731 // Record position before stub call for type feedback. | 785 // Record position before stub call for type feedback. |
732 SetSourcePosition(clause->position()); | 786 SetSourcePosition(clause->position()); |
733 | |
734 Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT); | 787 Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT); |
735 __ call(ic, RelocInfo::CODE_TARGET); | 788 EmitCallIC(ic, &patch_site); |
736 | 789 |
737 __ test(eax, Operand(eax)); | 790 __ test(eax, Operand(eax)); |
738 __ j(not_equal, &next_test); | 791 __ j(not_equal, &next_test); |
739 __ Drop(1); // Switch value is no longer needed. | 792 __ Drop(1); // Switch value is no longer needed. |
740 __ jmp(clause->body_target()->entry_label()); | 793 __ jmp(clause->body_target()->entry_label()); |
741 } | 794 } |
742 | 795 |
743 // Discard the test value and jump to the default if present, otherwise to | 796 // Discard the test value and jump to the default if present, otherwise to |
744 // the end of the statement. | 797 // the end of the statement. |
745 __ bind(&next_test); | 798 __ bind(&next_test); |
(...skipping 803 matching lines...) |
1549 SetSourcePosition(prop->position()); | 1602 SetSourcePosition(prop->position()); |
1550 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); | 1603 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); |
1551 EmitCallIC(ic, RelocInfo::CODE_TARGET); | 1604 EmitCallIC(ic, RelocInfo::CODE_TARGET); |
1552 } | 1605 } |
1553 | 1606 |
1554 | 1607 |
1555 void FullCodeGenerator::EmitConstantSmiAdd(Expression* expr, | 1608 void FullCodeGenerator::EmitConstantSmiAdd(Expression* expr, |
1556 OverwriteMode mode, | 1609 OverwriteMode mode, |
1557 bool left_is_constant_smi, | 1610 bool left_is_constant_smi, |
1558 Smi* value) { | 1611 Smi* value) { |
1559 NearLabel call_stub; | 1612 NearLabel call_stub, done; |
1560 Label done; | |
1561 __ add(Operand(eax), Immediate(value)); | 1613 __ add(Operand(eax), Immediate(value)); |
1562 __ j(overflow, &call_stub); | 1614 __ j(overflow, &call_stub); |
1563 __ test(eax, Immediate(kSmiTagMask)); | 1615 JumpPatchSite patch_site(masm_); |
1564 __ j(zero, &done); | 1616 patch_site.EmitJumpIfSmi(eax, &done); |
1565 | 1617 |
1566 // Undo the optimistic add operation and call the shared stub. | 1618 // Undo the optimistic add operation and call the shared stub. |
1567 __ bind(&call_stub); | 1619 __ bind(&call_stub); |
1568 __ sub(Operand(eax), Immediate(value)); | 1620 __ sub(Operand(eax), Immediate(value)); |
1569 Token::Value op = Token::ADD; | 1621 Token::Value op = Token::ADD; |
1570 TypeRecordingBinaryOpStub stub(op, mode); | 1622 TypeRecordingBinaryOpStub stub(op, mode); |
1571 if (left_is_constant_smi) { | 1623 if (left_is_constant_smi) { |
1572 __ mov(edx, Immediate(value)); | 1624 __ mov(edx, Immediate(value)); |
1573 } else { | 1625 } else { |
1574 __ mov(edx, eax); | 1626 __ mov(edx, eax); |
1575 __ mov(eax, Immediate(value)); | 1627 __ mov(eax, Immediate(value)); |
1576 } | 1628 } |
1577 __ CallStub(&stub); | 1629 EmitCallIC(stub.GetCode(), &patch_site); |
| 1630 |
1578 __ bind(&done); | 1631 __ bind(&done); |
1579 context()->Plug(eax); | 1632 context()->Plug(eax); |
1580 } | 1633 } |
1581 | 1634 |
1582 | 1635 |
1583 void FullCodeGenerator::EmitConstantSmiSub(Expression* expr, | 1636 void FullCodeGenerator::EmitConstantSmiSub(Expression* expr, |
1584 OverwriteMode mode, | 1637 OverwriteMode mode, |
1585 bool left_is_constant_smi, | 1638 bool left_is_constant_smi, |
1586 Smi* value) { | 1639 Smi* value) { |
1587 Label call_stub, done; | 1640 NearLabel call_stub, done; |
1588 if (left_is_constant_smi) { | 1641 if (left_is_constant_smi) { |
1589 __ mov(ecx, eax); | 1642 __ mov(ecx, eax); |
1590 __ mov(eax, Immediate(value)); | 1643 __ mov(eax, Immediate(value)); |
1591 __ sub(Operand(eax), ecx); | 1644 __ sub(Operand(eax), ecx); |
1592 } else { | 1645 } else { |
1593 __ sub(Operand(eax), Immediate(value)); | 1646 __ sub(Operand(eax), Immediate(value)); |
1594 } | 1647 } |
1595 __ j(overflow, &call_stub); | 1648 __ j(overflow, &call_stub); |
1596 __ test(eax, Immediate(kSmiTagMask)); | 1649 JumpPatchSite patch_site(masm_); |
1597 __ j(zero, &done); | 1650 patch_site.EmitJumpIfSmi(eax, &done); |
1598 | 1651 |
1599 __ bind(&call_stub); | 1652 __ bind(&call_stub); |
1600 if (left_is_constant_smi) { | 1653 if (left_is_constant_smi) { |
1601 __ mov(edx, Immediate(value)); | 1654 __ mov(edx, Immediate(value)); |
1602 __ mov(eax, ecx); | 1655 __ mov(eax, ecx); |
1603 } else { | 1656 } else { |
1604 __ add(Operand(eax), Immediate(value)); // Undo the subtraction. | 1657 __ add(Operand(eax), Immediate(value)); // Undo the subtraction. |
1605 __ mov(edx, eax); | 1658 __ mov(edx, eax); |
1606 __ mov(eax, Immediate(value)); | 1659 __ mov(eax, Immediate(value)); |
1607 } | 1660 } |
1608 Token::Value op = Token::SUB; | 1661 Token::Value op = Token::SUB; |
1609 TypeRecordingBinaryOpStub stub(op, mode); | 1662 TypeRecordingBinaryOpStub stub(op, mode); |
1610 __ CallStub(&stub); | 1663 EmitCallIC(stub.GetCode(), &patch_site); |
| 1664 |
1611 __ bind(&done); | 1665 __ bind(&done); |
1612 context()->Plug(eax); | 1666 context()->Plug(eax); |
1613 } | 1667 } |
1614 | 1668 |
1615 | 1669 |
1616 void FullCodeGenerator::EmitConstantSmiShiftOp(Expression* expr, | 1670 void FullCodeGenerator::EmitConstantSmiShiftOp(Expression* expr, |
1617 Token::Value op, | 1671 Token::Value op, |
1618 OverwriteMode mode, | 1672 OverwriteMode mode, |
1619 Smi* value) { | 1673 Smi* value) { |
1620 Label call_stub, smi_case, done; | 1674 NearLabel call_stub, smi_case, done; |
1621 int shift_value = value->value() & 0x1f; | 1675 int shift_value = value->value() & 0x1f; |
1622 | 1676 |
1623 __ test(eax, Immediate(kSmiTagMask)); | 1677 JumpPatchSite patch_site(masm_); |
1624 __ j(zero, &smi_case); | 1678 patch_site.EmitJumpIfSmi(eax, &smi_case); |
1625 | 1679 |
| 1680 // Call stub. |
1626 __ bind(&call_stub); | 1681 __ bind(&call_stub); |
1627 __ mov(edx, eax); | 1682 __ mov(edx, eax); |
1628 __ mov(eax, Immediate(value)); | 1683 __ mov(eax, Immediate(value)); |
1629 TypeRecordingBinaryOpStub stub(op, mode); | 1684 TypeRecordingBinaryOpStub stub(op, mode); |
1630 __ CallStub(&stub); | 1685 EmitCallIC(stub.GetCode(), &patch_site); |
1631 __ jmp(&done); | 1686 __ jmp(&done); |
1632 | 1687 |
| 1688 // Smi case. |
1633 __ bind(&smi_case); | 1689 __ bind(&smi_case); |
1634 switch (op) { | 1690 switch (op) { |
1635 case Token::SHL: | 1691 case Token::SHL: |
1636 if (shift_value != 0) { | 1692 if (shift_value != 0) { |
1637 __ mov(edx, eax); | 1693 __ mov(edx, eax); |
1638 if (shift_value > 1) { | 1694 if (shift_value > 1) { |
1639 __ shl(edx, shift_value - 1); | 1695 __ shl(edx, shift_value - 1); |
1640 } | 1696 } |
1641 // Convert int result to smi, checking that it is in int range. | 1697 // Convert int result to smi, checking that it is in int range. |
1642 ASSERT(kSmiTagSize == 1); // Adjust code if not the case. | 1698 ASSERT(kSmiTagSize == 1); // Adjust code if not the case. |
(...skipping 29 matching lines...) |
1672 | 1728 |
1673 __ bind(&done); | 1729 __ bind(&done); |
1674 context()->Plug(eax); | 1730 context()->Plug(eax); |
1675 } | 1731 } |
1676 | 1732 |
1677 | 1733 |
1678 void FullCodeGenerator::EmitConstantSmiBitOp(Expression* expr, | 1734 void FullCodeGenerator::EmitConstantSmiBitOp(Expression* expr, |
1679 Token::Value op, | 1735 Token::Value op, |
1680 OverwriteMode mode, | 1736 OverwriteMode mode, |
1681 Smi* value) { | 1737 Smi* value) { |
1682 Label smi_case, done; | 1738 NearLabel smi_case, done; |
1683 __ test(eax, Immediate(kSmiTagMask)); | 1739 |
1684 __ j(zero, &smi_case); | 1740 JumpPatchSite patch_site(masm_); |
| 1741 patch_site.EmitJumpIfSmi(eax, &smi_case); |
1685 | 1742 |
1686 // The order of the arguments does not matter for bit-ops with a | 1743 // The order of the arguments does not matter for bit-ops with a |
1687 // constant operand. | 1744 // constant operand. |
1688 __ mov(edx, Immediate(value)); | 1745 __ mov(edx, Immediate(value)); |
1689 TypeRecordingBinaryOpStub stub(op, mode); | 1746 TypeRecordingBinaryOpStub stub(op, mode); |
1690 __ CallStub(&stub); | 1747 EmitCallIC(stub.GetCode(), &patch_site); |
1691 __ jmp(&done); | 1748 __ jmp(&done); |
1692 | 1749 |
| 1750 // Smi case. |
1693 __ bind(&smi_case); | 1751 __ bind(&smi_case); |
1694 switch (op) { | 1752 switch (op) { |
1695 case Token::BIT_OR: | 1753 case Token::BIT_OR: |
1696 __ or_(Operand(eax), Immediate(value)); | 1754 __ or_(Operand(eax), Immediate(value)); |
1697 break; | 1755 break; |
1698 case Token::BIT_XOR: | 1756 case Token::BIT_XOR: |
1699 __ xor_(Operand(eax), Immediate(value)); | 1757 __ xor_(Operand(eax), Immediate(value)); |
1700 break; | 1758 break; |
1701 case Token::BIT_AND: | 1759 case Token::BIT_AND: |
1702 __ and_(Operand(eax), Immediate(value)); | 1760 __ and_(Operand(eax), Immediate(value)); |
(...skipping 47 matching lines...) |
1750 EmitConstantSmiBinaryOp(expr, op, mode, false, value); | 1808 EmitConstantSmiBinaryOp(expr, op, mode, false, value); |
1751 return; | 1809 return; |
1752 } else if (constant == kLeftConstant) { | 1810 } else if (constant == kLeftConstant) { |
1753 Smi* value = Smi::cast(*left->AsLiteral()->handle()); | 1811 Smi* value = Smi::cast(*left->AsLiteral()->handle()); |
1754 EmitConstantSmiBinaryOp(expr, op, mode, true, value); | 1812 EmitConstantSmiBinaryOp(expr, op, mode, true, value); |
1755 return; | 1813 return; |
1756 } | 1814 } |
1757 | 1815 |
1758 // Do combined smi check of the operands. Left operand is on the | 1816 // Do combined smi check of the operands. Left operand is on the |
1759 // stack. Right operand is in eax. | 1817 // stack. Right operand is in eax. |
1760 Label done, stub_call, smi_case; | 1818 NearLabel done, smi_case, stub_call; |
1761 __ pop(edx); | 1819 __ pop(edx); |
1762 __ mov(ecx, eax); | 1820 __ mov(ecx, eax); |
1763 __ or_(eax, Operand(edx)); | 1821 __ or_(eax, Operand(edx)); |
1764 __ test(eax, Immediate(kSmiTagMask)); | 1822 JumpPatchSite patch_site(masm_); |
1765 __ j(zero, &smi_case); | 1823 patch_site.EmitJumpIfSmi(eax, &smi_case); |
1766 | 1824 |
1767 __ bind(&stub_call); | 1825 __ bind(&stub_call); |
1768 __ mov(eax, ecx); | 1826 __ mov(eax, ecx); |
1769 TypeRecordingBinaryOpStub stub(op, mode); | 1827 TypeRecordingBinaryOpStub stub(op, mode); |
1770 __ CallStub(&stub); | 1828 EmitCallIC(stub.GetCode(), &patch_site); |
1771 __ jmp(&done); | 1829 __ jmp(&done); |
1772 | 1830 |
| 1831 // Smi case. |
1773 __ bind(&smi_case); | 1832 __ bind(&smi_case); |
1774 __ mov(eax, edx); // Copy left operand in case of a stub call. | 1833 __ mov(eax, edx); // Copy left operand in case of a stub call. |
1775 | 1834 |
1776 switch (op) { | 1835 switch (op) { |
1777 case Token::SAR: | 1836 case Token::SAR: |
1778 __ SmiUntag(eax); | 1837 __ SmiUntag(eax); |
1779 __ SmiUntag(ecx); | 1838 __ SmiUntag(ecx); |
1780 __ sar_cl(eax); // No checks of result necessary | 1839 __ sar_cl(eax); // No checks of result necessary |
1781 __ SmiTag(eax); | 1840 __ SmiTag(eax); |
1782 break; | 1841 break; |
(...skipping 58 matching lines...) |
1841 | 1900 |
1842 __ bind(&done); | 1901 __ bind(&done); |
1843 context()->Plug(eax); | 1902 context()->Plug(eax); |
1844 } | 1903 } |
1845 | 1904 |
1846 | 1905 |
1847 void FullCodeGenerator::EmitBinaryOp(Token::Value op, | 1906 void FullCodeGenerator::EmitBinaryOp(Token::Value op, |
1848 OverwriteMode mode) { | 1907 OverwriteMode mode) { |
1849 __ pop(edx); | 1908 __ pop(edx); |
1850 TypeRecordingBinaryOpStub stub(op, mode); | 1909 TypeRecordingBinaryOpStub stub(op, mode); |
1851 __ CallStub(&stub); | 1910 EmitCallIC(stub.GetCode(), NULL); // NULL signals no inlined smi code. |
1852 context()->Plug(eax); | 1911 context()->Plug(eax); |
1853 } | 1912 } |
1854 | 1913 |
1855 | 1914 |
1856 void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) { | 1915 void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) { |
1857 // Invalid left-hand sides are rewritten to have a 'throw | 1916 // Invalid left-hand sides are rewritten to have a 'throw |
1858 // ReferenceError' on the left-hand side. | 1917 // ReferenceError' on the left-hand side. |
1859 if (!expr->IsValidLeftHandSide()) { | 1918 if (!expr->IsValidLeftHandSide()) { |
1860 VisitForEffect(expr); | 1919 VisitForEffect(expr); |
1861 return; | 1920 return; |
(...skipping 1844 matching lines...) |
3706 __ mov(Operand(esp, kPointerSize), eax); | 3765 __ mov(Operand(esp, kPointerSize), eax); |
3707 break; | 3766 break; |
3708 case KEYED_PROPERTY: | 3767 case KEYED_PROPERTY: |
3709 __ mov(Operand(esp, 2 * kPointerSize), eax); | 3768 __ mov(Operand(esp, 2 * kPointerSize), eax); |
3710 break; | 3769 break; |
3711 } | 3770 } |
3712 } | 3771 } |
3713 } | 3772 } |
3714 | 3773 |
3715 // Inline smi case if we are in a loop. | 3774 // Inline smi case if we are in a loop. |
3716 NearLabel stub_call; | 3775 NearLabel stub_call, done; |
3717 Label done; | 3776 JumpPatchSite patch_site(masm_); |
| 3777 |
3718 if (ShouldInlineSmiCase(expr->op())) { | 3778 if (ShouldInlineSmiCase(expr->op())) { |
3719 if (expr->op() == Token::INC) { | 3779 if (expr->op() == Token::INC) { |
3720 __ add(Operand(eax), Immediate(Smi::FromInt(1))); | 3780 __ add(Operand(eax), Immediate(Smi::FromInt(1))); |
3721 } else { | 3781 } else { |
3722 __ sub(Operand(eax), Immediate(Smi::FromInt(1))); | 3782 __ sub(Operand(eax), Immediate(Smi::FromInt(1))); |
3723 } | 3783 } |
3724 __ j(overflow, &stub_call); | 3784 __ j(overflow, &stub_call); |
3725 // We could eliminate this smi check if we split the code at | 3785 // We could eliminate this smi check if we split the code at |
3726 // the first smi check before calling ToNumber. | 3786 // the first smi check before calling ToNumber. |
3727 __ test(eax, Immediate(kSmiTagMask)); | 3787 patch_site.EmitJumpIfSmi(eax, &done); |
3728 __ j(zero, &done); | 3788 |
3729 __ bind(&stub_call); | 3789 __ bind(&stub_call); |
3730 // Call stub. Undo operation first. | 3790 // Call stub. Undo operation first. |
3731 if (expr->op() == Token::INC) { | 3791 if (expr->op() == Token::INC) { |
3732 __ sub(Operand(eax), Immediate(Smi::FromInt(1))); | 3792 __ sub(Operand(eax), Immediate(Smi::FromInt(1))); |
3733 } else { | 3793 } else { |
3734 __ add(Operand(eax), Immediate(Smi::FromInt(1))); | 3794 __ add(Operand(eax), Immediate(Smi::FromInt(1))); |
3735 } | 3795 } |
3736 } | 3796 } |
3737 | 3797 |
3738 // Record position before stub call. | 3798 // Record position before stub call. |
3739 SetSourcePosition(expr->position()); | 3799 SetSourcePosition(expr->position()); |
3740 | 3800 |
3741 // Call stub for +1/-1. | 3801 // Call stub for +1/-1. |
3742 __ mov(edx, eax); | 3802 __ mov(edx, eax); |
3743 __ mov(eax, Immediate(Smi::FromInt(1))); | 3803 __ mov(eax, Immediate(Smi::FromInt(1))); |
3744 TypeRecordingBinaryOpStub stub(expr->binary_op(), | 3804 TypeRecordingBinaryOpStub stub(expr->binary_op(), |
3745 NO_OVERWRITE); | 3805 NO_OVERWRITE); |
3746 __ CallStub(&stub); | 3806 EmitCallIC(stub.GetCode(), &patch_site); |
3747 __ bind(&done); | 3807 __ bind(&done); |
3748 | 3808 |
3749 // Store the value returned in eax. | 3809 // Store the value returned in eax. |
3750 switch (assign_type) { | 3810 switch (assign_type) { |
3751 case VARIABLE: | 3811 case VARIABLE: |
3752 if (expr->is_postfix()) { | 3812 if (expr->is_postfix()) { |
3753 // Perform the assignment as if via '='. | 3813 // Perform the assignment as if via '='. |
3754 { EffectContext context(this); | 3814 { EffectContext context(this); |
3755 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), | 3815 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), |
3756 Token::ASSIGN); | 3816 Token::ASSIGN); |
(...skipping 252 matching lines...) |
4009 cc = greater_equal; | 4069 cc = greater_equal; |
4010 __ pop(edx); | 4070 __ pop(edx); |
4011 break; | 4071 break; |
4012 case Token::IN: | 4072 case Token::IN: |
4013 case Token::INSTANCEOF: | 4073 case Token::INSTANCEOF: |
4014 default: | 4074 default: |
4015 UNREACHABLE(); | 4075 UNREACHABLE(); |
4016 } | 4076 } |
4017 | 4077 |
4018 bool inline_smi_code = ShouldInlineSmiCase(op); | 4078 bool inline_smi_code = ShouldInlineSmiCase(op); |
| 4079 JumpPatchSite patch_site(masm_); |
4019 if (inline_smi_code) { | 4080 if (inline_smi_code) { |
4020 NearLabel slow_case; | 4081 NearLabel slow_case; |
4021 __ mov(ecx, Operand(edx)); | 4082 __ mov(ecx, Operand(edx)); |
4022 __ or_(ecx, Operand(eax)); | 4083 __ or_(ecx, Operand(eax)); |
4023 __ test(ecx, Immediate(kSmiTagMask)); | 4084 patch_site.EmitJumpIfNotSmi(ecx, &slow_case); |
4024 __ j(not_zero, &slow_case, not_taken); | |
4025 __ cmp(edx, Operand(eax)); | 4085 __ cmp(edx, Operand(eax)); |
4026 Split(cc, if_true, if_false, NULL); | 4086 Split(cc, if_true, if_false, NULL); |
4027 __ bind(&slow_case); | 4087 __ bind(&slow_case); |
4028 } | 4088 } |
4029 | 4089 |
4030 // Record position and call the compare IC. | 4090 // Record position and call the compare IC. |
| 4091 SetSourcePosition(expr->position()); |
4031 Handle<Code> ic = CompareIC::GetUninitialized(op); | 4092 Handle<Code> ic = CompareIC::GetUninitialized(op); |
4032 SetSourcePosition(expr->position()); | 4093 EmitCallIC(ic, &patch_site); |
4033 __ call(ic, RelocInfo::CODE_TARGET); | 4094 |
4034 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); | 4095 PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false); |
4035 __ test(eax, Operand(eax)); | 4096 __ test(eax, Operand(eax)); |
4036 Split(cc, if_true, if_false, fall_through); | 4097 Split(cc, if_true, if_false, fall_through); |
4037 } | 4098 } |
4038 } | 4099 } |
4039 | 4100 |
4040 // Convert the result of the comparison into one expected for this | 4101 // Convert the result of the comparison into one expected for this |
4041 // expression's context. | 4102 // expression's context. |
4042 context()->Plug(if_true, if_false); | 4103 context()->Plug(if_true, if_false); |
4043 } | 4104 } |
(...skipping 83 matching lines...) |
4127 case Code::KEYED_STORE_IC: | 4188 case Code::KEYED_STORE_IC: |
4128 __ nop(); // Signals no inlined code. | 4189 __ nop(); // Signals no inlined code. |
4129 break; | 4190 break; |
4130 default: | 4191 default: |
4131 // Do nothing. | 4192 // Do nothing. |
4132 break; | 4193 break; |
4133 } | 4194 } |
4134 } | 4195 } |
4135 | 4196 |
4136 | 4197 |
| 4198 void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) { |
| 4199 __ call(ic, RelocInfo::CODE_TARGET); |
| 4200 if (patch_site != NULL && patch_site->is_bound()) { |
| 4201 patch_site->EmitPatchInfo(); |
| 4202 } else { |
| 4203 __ nop(); // Signals no inlined code. |
| 4204 } |
| 4205 } |
| 4206 |
| 4207 |
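The EmitCallIC overload above is what ties a JumpPatchSite to its IC: when a patch site was bound, EmitPatchInfo plants a test eax, delta instruction immediately after the call, where delta is the byte distance from the patched jump to that test instruction (asserted to fit in 8 bits); otherwise a single nop signals that no smi code was inlined. A hedged sketch of how a runtime patcher could consume this encoding — 0xA9 is the standard ia32 opcode for test eax, imm32, but the function name and layout here are assumptions for illustration, not the actual IC patching code:

#include <cstdint>

// after_call points at the instruction that follows the IC call.
void PatchInlinedSmiCheck(uint8_t* after_call) {
  const uint8_t kTestEaxOpcode = 0xA9;                 // test eax, imm32
  if (*after_call != kTestEaxOpcode) return;           // A nop here means nothing was inlined.
  int8_t delta = static_cast<int8_t>(after_call[1]);   // Delta fits in the first immediate byte.
  uint8_t* jcc = after_call - delta;                   // Back at the jump bound at patch_site_.
  if (*jcc == 0x72) *jcc = 0x74;                       // jc  -> jz
  else if (*jcc == 0x73) *jcc = 0x75;                  // jnc -> jnz
}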
4137 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { | 4208 void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) { |
4138 ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset); | 4209 ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset); |
4139 __ mov(Operand(ebp, frame_offset), value); | 4210 __ mov(Operand(ebp, frame_offset), value); |
4140 } | 4211 } |
4141 | 4212 |
4142 | 4213 |
4143 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { | 4214 void FullCodeGenerator::LoadContextField(Register dst, int context_index) { |
4144 __ mov(dst, ContextOperand(esi, context_index)); | 4215 __ mov(dst, ContextOperand(esi, context_index)); |
4145 } | 4216 } |
4146 | 4217 |
(...skipping 27 matching lines...) |
4174 // And return. | 4245 // And return. |
4175 __ ret(0); | 4246 __ ret(0); |
4176 } | 4247 } |
4177 | 4248 |
4178 | 4249 |
4179 #undef __ | 4250 #undef __ |
4180 | 4251 |
4181 } } // namespace v8::internal | 4252 } } // namespace v8::internal |
4182 | 4253 |
4183 #endif // V8_TARGET_ARCH_IA32 | 4254 #endif // V8_TARGET_ARCH_IA32 |