OLD | NEW |
---|---|
1 // Copyright 2010 the V8 project authors. All rights reserved. | 1 // Copyright 2010 the V8 project authors. All rights reserved. |
2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
4 // met: | 4 // met: |
5 // | 5 // |
6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
(...skipping 1530 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1541 } | 1541 } |
1542 | 1542 |
1543 | 1543 |
1544 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) { | 1544 void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) { |
1545 SetSourcePosition(prop->position()); | 1545 SetSourcePosition(prop->position()); |
1546 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); | 1546 Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); |
1547 EmitCallIC(ic, RelocInfo::CODE_TARGET); | 1547 EmitCallIC(ic, RelocInfo::CODE_TARGET); |
1548 } | 1548 } |
1549 | 1549 |
1550 | 1550 |
1551 class JumpPatchSite BASE_EMBEDDED { | |
1552 public: | |
1553 JumpPatchSite(MacroAssembler* masm, NearLabel* target, Condition cc) | |
1554 : masm_(masm), jump_target_(target), cc_(cc) { } | |
1555 | |
1556 void EmitJump() { | |
1557 masm_->bind(&patch_site_); | |
Vitaly Repeshko
2010/12/09 15:59:01
In debug mode in ~JumpPatchSite we could assert th
fschneider
2010/12/10 13:19:24
Done.
| |
1558 masm_->jmp(jump_target_); | |
1559 } | |
1560 | |
1561 void EmitPatchInfo() { | |
1562 int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_); | |
1563 __ test(eax, Immediate(delta_to_patch_site << 16 | cc_)); | |
Vitaly Repeshko
2010/12/09 15:59:01
For smi checks the condition is always not_zero. W
fschneider
2010/12/10 13:19:24
Done.
| |
1564 } | |
1565 | |
1566 private: | |
1567 MacroAssembler* masm_; | |
1568 Label patch_site_; | |
1569 NearLabel* jump_target_; | |
1570 Condition cc_; | |
1571 }; | |
1572 | |
1573 | |
1551 void FullCodeGenerator::EmitConstantSmiAdd(Expression* expr, | 1574 void FullCodeGenerator::EmitConstantSmiAdd(Expression* expr, |
1552 OverwriteMode mode, | 1575 OverwriteMode mode, |
1553 bool left_is_constant_smi, | 1576 bool left_is_constant_smi, |
1554 Smi* value) { | 1577 Smi* value) { |
1555 NearLabel call_stub; | 1578 NearLabel call_stub, done; |
1556 Label done; | |
1557 __ add(Operand(eax), Immediate(value)); | 1579 __ add(Operand(eax), Immediate(value)); |
1558 __ j(overflow, &call_stub); | 1580 __ j(overflow, &call_stub); |
1559 __ test(eax, Immediate(kSmiTagMask)); | 1581 __ test(eax, Immediate(kSmiTagMask)); |
1560 __ j(zero, &done); | 1582 JumpPatchSite patch_site(masm_, &call_stub, not_zero); |
1583 patch_site.EmitJump(); | |
1584 __ jmp(&done); | |
1561 | 1585 |
1562 // Undo the optimistic add operation and call the shared stub. | 1586 // Undo the optimistic add operation and call the shared stub. |
1563 __ bind(&call_stub); | 1587 __ bind(&call_stub); |
1564 __ sub(Operand(eax), Immediate(value)); | 1588 __ sub(Operand(eax), Immediate(value)); |
1565 Token::Value op = Token::ADD; | 1589 Token::Value op = Token::ADD; |
1566 TypeRecordingBinaryOpStub stub(op, mode); | 1590 TypeRecordingBinaryOpStub stub(op, mode); |
1567 if (left_is_constant_smi) { | 1591 if (left_is_constant_smi) { |
1568 __ mov(edx, Immediate(value)); | 1592 __ mov(edx, Immediate(value)); |
1569 } else { | 1593 } else { |
1570 __ mov(edx, eax); | 1594 __ mov(edx, eax); |
1571 __ mov(eax, Immediate(value)); | 1595 __ mov(eax, Immediate(value)); |
1572 } | 1596 } |
1573 __ CallStub(&stub); | 1597 __ CallStub(&stub); |
Vitaly Repeshko
2010/12/09 15:59:01
Consider adding a function like EmitCallIC that ta
William Hesse
2010/12/09 16:27:58
I'm not sure this is a good idea, since the existi
Kevin Millikin (Chromium)
2010/12/10 06:41:41
I like Vitaly's idea because it's safer. I think
fschneider
2010/12/10 13:19:24
Done.
| |
1598 patch_site.EmitPatchInfo(); | |
1599 | |
1574 __ bind(&done); | 1600 __ bind(&done); |
1575 context()->Plug(eax); | 1601 context()->Plug(eax); |
1576 } | 1602 } |
1577 | 1603 |
1578 | 1604 |
1579 void FullCodeGenerator::EmitConstantSmiSub(Expression* expr, | 1605 void FullCodeGenerator::EmitConstantSmiSub(Expression* expr, |
1580 OverwriteMode mode, | 1606 OverwriteMode mode, |
1581 bool left_is_constant_smi, | 1607 bool left_is_constant_smi, |
1582 Smi* value) { | 1608 Smi* value) { |
1583 Label call_stub, done; | 1609 NearLabel call_stub, done; |
1584 if (left_is_constant_smi) { | 1610 if (left_is_constant_smi) { |
1585 __ mov(ecx, eax); | 1611 __ mov(ecx, eax); |
1586 __ mov(eax, Immediate(value)); | 1612 __ mov(eax, Immediate(value)); |
1587 __ sub(Operand(eax), ecx); | 1613 __ sub(Operand(eax), ecx); |
1588 } else { | 1614 } else { |
1589 __ sub(Operand(eax), Immediate(value)); | 1615 __ sub(Operand(eax), Immediate(value)); |
1590 } | 1616 } |
1591 __ j(overflow, &call_stub); | 1617 __ j(overflow, &call_stub); |
1592 __ test(eax, Immediate(kSmiTagMask)); | 1618 __ test(eax, Immediate(kSmiTagMask)); |
1593 __ j(zero, &done); | 1619 JumpPatchSite patch_site(masm_, &call_stub, not_zero); |
1620 patch_site.EmitJump(); | |
1621 __ jmp(&done); | |
1594 | 1622 |
1595 __ bind(&call_stub); | 1623 __ bind(&call_stub); |
1596 if (left_is_constant_smi) { | 1624 if (left_is_constant_smi) { |
1597 __ mov(edx, Immediate(value)); | 1625 __ mov(edx, Immediate(value)); |
1598 __ mov(eax, ecx); | 1626 __ mov(eax, ecx); |
1599 } else { | 1627 } else { |
1600 __ add(Operand(eax), Immediate(value)); // Undo the subtraction. | 1628 __ add(Operand(eax), Immediate(value)); // Undo the subtraction. |
1601 __ mov(edx, eax); | 1629 __ mov(edx, eax); |
1602 __ mov(eax, Immediate(value)); | 1630 __ mov(eax, Immediate(value)); |
1603 } | 1631 } |
1604 Token::Value op = Token::SUB; | 1632 Token::Value op = Token::SUB; |
1605 TypeRecordingBinaryOpStub stub(op, mode); | 1633 TypeRecordingBinaryOpStub stub(op, mode); |
1606 __ CallStub(&stub); | 1634 __ CallStub(&stub); |
1635 patch_site.EmitPatchInfo(); | |
1636 | |
1607 __ bind(&done); | 1637 __ bind(&done); |
1608 context()->Plug(eax); | 1638 context()->Plug(eax); |
1609 } | 1639 } |
1610 | 1640 |
1611 | 1641 |
1612 void FullCodeGenerator::EmitConstantSmiShiftOp(Expression* expr, | 1642 void FullCodeGenerator::EmitConstantSmiShiftOp(Expression* expr, |
1613 Token::Value op, | 1643 Token::Value op, |
1614 OverwriteMode mode, | 1644 OverwriteMode mode, |
1615 Smi* value) { | 1645 Smi* value) { |
1616 Label call_stub, smi_case, done; | 1646 NearLabel call_stub, done; |
1617 int shift_value = value->value() & 0x1f; | 1647 int shift_value = value->value() & 0x1f; |
1618 | 1648 |
1619 __ test(eax, Immediate(kSmiTagMask)); | 1649 __ test(eax, Immediate(kSmiTagMask)); |
1620 __ j(zero, &smi_case); | 1650 // Patch site. |
1651 JumpPatchSite patch_site(masm_, &call_stub, not_zero); | |
1652 patch_site.EmitJump(); | |
1621 | 1653 |
1622 __ bind(&call_stub); | 1654 // Smi case. |
1623 __ mov(edx, eax); | |
1624 __ mov(eax, Immediate(value)); | |
1625 TypeRecordingBinaryOpStub stub(op, mode); | |
1626 __ CallStub(&stub); | |
1627 __ jmp(&done); | |
1628 | |
1629 __ bind(&smi_case); | |
1630 switch (op) { | 1655 switch (op) { |
1631 case Token::SHL: | 1656 case Token::SHL: |
1632 if (shift_value != 0) { | 1657 if (shift_value != 0) { |
1633 __ mov(edx, eax); | 1658 __ mov(edx, eax); |
1634 if (shift_value > 1) { | 1659 if (shift_value > 1) { |
1635 __ shl(edx, shift_value - 1); | 1660 __ shl(edx, shift_value - 1); |
1636 } | 1661 } |
1637 // Convert int result to smi, checking that it is in int range. | 1662 // Convert int result to smi, checking that it is in int range. |
1638 ASSERT(kSmiTagSize == 1); // Adjust code if not the case. | 1663 ASSERT(kSmiTagSize == 1); // Adjust code if not the case. |
1639 __ add(edx, Operand(edx)); | 1664 __ add(edx, Operand(edx)); |
(...skipping 18 matching lines...) Expand all Loading... | |
1658 __ mov(eax, edx); // Put result back into eax. | 1683 __ mov(eax, edx); // Put result back into eax. |
1659 } else { | 1684 } else { |
1660 __ SmiUntag(eax); | 1685 __ SmiUntag(eax); |
1661 __ shr(eax, shift_value); | 1686 __ shr(eax, shift_value); |
1662 __ SmiTag(eax); | 1687 __ SmiTag(eax); |
1663 } | 1688 } |
1664 break; | 1689 break; |
1665 default: | 1690 default: |
1666 UNREACHABLE(); | 1691 UNREACHABLE(); |
1667 } | 1692 } |
1693 __ jmp(&done); | |
1694 | |
1695 // Call stub. | |
1696 __ bind(&call_stub); | |
1697 __ mov(edx, eax); | |
1698 __ mov(eax, Immediate(value)); | |
1699 TypeRecordingBinaryOpStub stub(op, mode); | |
1700 __ CallStub(&stub); | |
1701 patch_site.EmitPatchInfo(); | |
1668 | 1702 |
1669 __ bind(&done); | 1703 __ bind(&done); |
1670 context()->Plug(eax); | 1704 context()->Plug(eax); |
1671 } | 1705 } |
1672 | 1706 |
1673 | 1707 |
1674 void FullCodeGenerator::EmitConstantSmiBitOp(Expression* expr, | 1708 void FullCodeGenerator::EmitConstantSmiBitOp(Expression* expr, |
1675 Token::Value op, | 1709 Token::Value op, |
1676 OverwriteMode mode, | 1710 OverwriteMode mode, |
1677 Smi* value) { | 1711 Smi* value) { |
1678 Label smi_case, done; | 1712 NearLabel call_stub, done; |
1679 __ test(eax, Immediate(kSmiTagMask)); | 1713 __ test(eax, Immediate(kSmiTagMask)); |
1680 __ j(zero, &smi_case); | 1714 // Patch site. The first invocation of the stub will patch the jmp with |
1715 // the required conditional jump. | |
1716 JumpPatchSite patch_site(masm_, &call_stub, not_zero); | |
1717 patch_site.EmitJump(); | |
1681 | 1718 |
1682 // The order of the arguments does not matter for bit-ops with a | 1719 // Smi case. |
1683 // constant operand. | |
1684 __ mov(edx, Immediate(value)); | |
1685 TypeRecordingBinaryOpStub stub(op, mode); | |
1686 __ CallStub(&stub); | |
1687 __ jmp(&done); | |
1688 | |
1689 __ bind(&smi_case); | |
1690 switch (op) { | 1720 switch (op) { |
1691 case Token::BIT_OR: | 1721 case Token::BIT_OR: |
1692 __ or_(Operand(eax), Immediate(value)); | 1722 __ or_(Operand(eax), Immediate(value)); |
1693 break; | 1723 break; |
1694 case Token::BIT_XOR: | 1724 case Token::BIT_XOR: |
1695 __ xor_(Operand(eax), Immediate(value)); | 1725 __ xor_(Operand(eax), Immediate(value)); |
1696 break; | 1726 break; |
1697 case Token::BIT_AND: | 1727 case Token::BIT_AND: |
1698 __ and_(Operand(eax), Immediate(value)); | 1728 __ and_(Operand(eax), Immediate(value)); |
1699 break; | 1729 break; |
1700 default: | 1730 default: |
1701 UNREACHABLE(); | 1731 UNREACHABLE(); |
1702 } | 1732 } |
1733 __ jmp(&done); | |
1734 | |
1735 // The order of the arguments does not matter for bit-ops with a | |
1736 // constant operand. | |
1737 __ bind(&call_stub); | |
1738 __ mov(edx, Immediate(value)); | |
1739 TypeRecordingBinaryOpStub stub(op, mode); | |
1740 __ CallStub(&stub); | |
1741 patch_site.EmitPatchInfo(); | |
1703 | 1742 |
1704 __ bind(&done); | 1743 __ bind(&done); |
1705 context()->Plug(eax); | 1744 context()->Plug(eax); |
1706 } | 1745 } |
1707 | 1746 |
1708 | 1747 |
1709 void FullCodeGenerator::EmitConstantSmiBinaryOp(Expression* expr, | 1748 void FullCodeGenerator::EmitConstantSmiBinaryOp(Expression* expr, |
1710 Token::Value op, | 1749 Token::Value op, |
1711 OverwriteMode mode, | 1750 OverwriteMode mode, |
1712 bool left_is_constant_smi, | 1751 bool left_is_constant_smi, |
(...skipping 33 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1746 EmitConstantSmiBinaryOp(expr, op, mode, false, value); | 1785 EmitConstantSmiBinaryOp(expr, op, mode, false, value); |
1747 return; | 1786 return; |
1748 } else if (constant == kLeftConstant) { | 1787 } else if (constant == kLeftConstant) { |
1749 Smi* value = Smi::cast(*left->AsLiteral()->handle()); | 1788 Smi* value = Smi::cast(*left->AsLiteral()->handle()); |
1750 EmitConstantSmiBinaryOp(expr, op, mode, true, value); | 1789 EmitConstantSmiBinaryOp(expr, op, mode, true, value); |
1751 return; | 1790 return; |
1752 } | 1791 } |
1753 | 1792 |
1754 // Do combined smi check of the operands. Left operand is on the | 1793 // Do combined smi check of the operands. Left operand is on the |
1755 // stack. Right operand is in eax. | 1794 // stack. Right operand is in eax. |
1756 Label done, stub_call, smi_case; | 1795 NearLabel done, stub_call; |
1757 __ pop(edx); | 1796 __ pop(edx); |
1758 __ mov(ecx, eax); | 1797 __ mov(ecx, eax); |
1759 __ or_(eax, Operand(edx)); | 1798 __ or_(eax, Operand(edx)); |
1760 __ test(eax, Immediate(kSmiTagMask)); | 1799 __ test(eax, Immediate(kSmiTagMask)); |
1761 __ j(zero, &smi_case); | 1800 JumpPatchSite patch_site(masm_, &stub_call, not_zero); |
1801 patch_site.EmitJump(); | |
1762 | 1802 |
1763 __ bind(&stub_call); | 1803 // Smi case. |
1764 __ mov(eax, ecx); | |
1765 TypeRecordingBinaryOpStub stub(op, mode); | |
1766 __ CallStub(&stub); | |
1767 __ jmp(&done); | |
1768 | |
1769 __ bind(&smi_case); | |
1770 __ mov(eax, edx); // Copy left operand in case of a stub call. | 1804 __ mov(eax, edx); // Copy left operand in case of a stub call. |
1771 | 1805 |
1772 switch (op) { | 1806 switch (op) { |
1773 case Token::SAR: | 1807 case Token::SAR: |
1774 __ SmiUntag(eax); | 1808 __ SmiUntag(eax); |
1775 __ SmiUntag(ecx); | 1809 __ SmiUntag(ecx); |
1776 __ sar_cl(eax); // No checks of result necessary | 1810 __ sar_cl(eax); // No checks of result necessary |
1777 __ SmiTag(eax); | 1811 __ SmiTag(eax); |
1778 break; | 1812 break; |
1779 case Token::SHL: { | 1813 case Token::SHL: { |
(...skipping 47 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
1827 break; | 1861 break; |
1828 case Token::BIT_AND: | 1862 case Token::BIT_AND: |
1829 __ and_(eax, Operand(ecx)); | 1863 __ and_(eax, Operand(ecx)); |
1830 break; | 1864 break; |
1831 case Token::BIT_XOR: | 1865 case Token::BIT_XOR: |
1832 __ xor_(eax, Operand(ecx)); | 1866 __ xor_(eax, Operand(ecx)); |
1833 break; | 1867 break; |
1834 default: | 1868 default: |
1835 UNREACHABLE(); | 1869 UNREACHABLE(); |
1836 } | 1870 } |
1871 __ jmp(&done); | |
1872 | |
1873 __ bind(&stub_call); | |
1874 __ mov(eax, ecx); | |
1875 TypeRecordingBinaryOpStub stub(op, mode); | |
1876 __ CallStub(&stub); | |
1877 patch_site.EmitPatchInfo(); | |
1837 | 1878 |
1838 __ bind(&done); | 1879 __ bind(&done); |
1839 context()->Plug(eax); | 1880 context()->Plug(eax); |
1840 } | 1881 } |
1841 | 1882 |
1842 | 1883 |
1843 void FullCodeGenerator::EmitBinaryOp(Token::Value op, | 1884 void FullCodeGenerator::EmitBinaryOp(Token::Value op, |
1844 OverwriteMode mode) { | 1885 OverwriteMode mode) { |
1845 __ pop(edx); | 1886 __ pop(edx); |
1846 TypeRecordingBinaryOpStub stub(op, mode); | 1887 TypeRecordingBinaryOpStub stub(op, mode); |
1847 __ CallStub(&stub); | 1888 __ CallStub(&stub); |
1889 __ nop(); // Signals no inlined smi code. | |
1848 context()->Plug(eax); | 1890 context()->Plug(eax); |
1849 } | 1891 } |
1850 | 1892 |
1851 | 1893 |
1852 void FullCodeGenerator::EmitAssignment(Expression* expr) { | 1894 void FullCodeGenerator::EmitAssignment(Expression* expr) { |
1853 // Invalid left-hand sides are rewritten to have a 'throw | 1895 // Invalid left-hand sides are rewritten to have a 'throw |
1854 // ReferenceError' on the left-hand side. | 1896 // ReferenceError' on the left-hand side. |
1855 if (!expr->IsValidLeftHandSide()) { | 1897 if (!expr->IsValidLeftHandSide()) { |
1856 VisitForEffect(expr); | 1898 VisitForEffect(expr); |
1857 return; | 1899 return; |
(...skipping 1844 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
3702 break; | 3744 break; |
3703 case KEYED_PROPERTY: | 3745 case KEYED_PROPERTY: |
3704 __ mov(Operand(esp, 2 * kPointerSize), eax); | 3746 __ mov(Operand(esp, 2 * kPointerSize), eax); |
3705 break; | 3747 break; |
3706 } | 3748 } |
3707 } | 3749 } |
3708 } | 3750 } |
3709 | 3751 |
3710 // Inline smi case if we are in a loop. | 3752 // Inline smi case if we are in a loop. |
3711 NearLabel stub_call; | 3753 NearLabel stub_call; |
3754 JumpPatchSite patch_site(masm_, &stub_call, not_zero); | |
3712 Label done; | 3755 Label done; |
3713 if (ShouldInlineSmiCase(expr->op())) { | 3756 if (ShouldInlineSmiCase(expr->op())) { |
3714 if (expr->op() == Token::INC) { | 3757 if (expr->op() == Token::INC) { |
3715 __ add(Operand(eax), Immediate(Smi::FromInt(1))); | 3758 __ add(Operand(eax), Immediate(Smi::FromInt(1))); |
3716 } else { | 3759 } else { |
3717 __ sub(Operand(eax), Immediate(Smi::FromInt(1))); | 3760 __ sub(Operand(eax), Immediate(Smi::FromInt(1))); |
3718 } | 3761 } |
3719 __ j(overflow, &stub_call); | 3762 __ j(overflow, &stub_call); |
3720 // We could eliminate this smi check if we split the code at | 3763 // We could eliminate this smi check if we split the code at |
3721 // the first smi check before calling ToNumber. | 3764 // the first smi check before calling ToNumber. |
3722 __ test(eax, Immediate(kSmiTagMask)); | 3765 __ test(eax, Immediate(kSmiTagMask)); |
3723 __ j(zero, &done); | 3766 patch_site.EmitJump(); |
3767 __ jmp(&done); | |
William Hesse
2010/12/09 16:27:58
Too bad that you have to put an extra jump here, t
| |
3768 | |
3724 __ bind(&stub_call); | 3769 __ bind(&stub_call); |
3725 // Call stub. Undo operation first. | 3770 // Call stub. Undo operation first. |
3726 if (expr->op() == Token::INC) { | 3771 if (expr->op() == Token::INC) { |
3727 __ sub(Operand(eax), Immediate(Smi::FromInt(1))); | 3772 __ sub(Operand(eax), Immediate(Smi::FromInt(1))); |
3728 } else { | 3773 } else { |
3729 __ add(Operand(eax), Immediate(Smi::FromInt(1))); | 3774 __ add(Operand(eax), Immediate(Smi::FromInt(1))); |
3730 } | 3775 } |
3731 } | 3776 } |
3732 | 3777 |
3733 // Record position before stub call. | 3778 // Record position before stub call. |
3734 SetSourcePosition(expr->position()); | 3779 SetSourcePosition(expr->position()); |
3735 | 3780 |
3736 // Call stub for +1/-1. | 3781 // Call stub for +1/-1. |
3737 __ mov(edx, eax); | 3782 __ mov(edx, eax); |
3738 __ mov(eax, Immediate(Smi::FromInt(1))); | 3783 __ mov(eax, Immediate(Smi::FromInt(1))); |
3739 TypeRecordingBinaryOpStub stub(expr->binary_op(), | 3784 TypeRecordingBinaryOpStub stub(expr->binary_op(), |
3740 NO_OVERWRITE); | 3785 NO_OVERWRITE); |
3741 __ CallStub(&stub); | 3786 __ CallStub(&stub); |
3787 // Indicate here if smi code was inlined. | |
3788 if (ShouldInlineSmiCase(expr->op())) { | |
3789 patch_site.EmitPatchInfo(); | |
3790 } else { | |
3791 __ nop(); // Signals no inlined code. | |
3792 } | |
3793 | |
3742 __ bind(&done); | 3794 __ bind(&done); |
3743 | |
3744 // Store the value returned in eax. | 3795 // Store the value returned in eax. |
3745 switch (assign_type) { | 3796 switch (assign_type) { |
3746 case VARIABLE: | 3797 case VARIABLE: |
3747 if (expr->is_postfix()) { | 3798 if (expr->is_postfix()) { |
3748 // Perform the assignment as if via '='. | 3799 // Perform the assignment as if via '='. |
3749 { EffectContext context(this); | 3800 { EffectContext context(this); |
3750 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), | 3801 EmitVariableAssignment(expr->expression()->AsVariableProxy()->var(), |
3751 Token::ASSIGN); | 3802 Token::ASSIGN); |
3752 } | 3803 } |
1753 // For all contexts except EffectContext we have the result on | 1804 // For all contexts except EffectContext we have the result on |
(...skipping 409 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
4163 // And return. | 4214 // And return. |
4164 __ ret(0); | 4215 __ ret(0); |
4165 } | 4216 } |
4166 | 4217 |
4167 | 4218 |
4168 #undef __ | 4219 #undef __ |
4169 | 4220 |
4170 } } // namespace v8::internal | 4221 } } // namespace v8::internal |
4171 | 4222 |
4172 #endif // V8_TARGET_ARCH_IA32 | 4223 #endif // V8_TARGET_ARCH_IA32 |
OLD | NEW |