Chromium Code Reviews

Side by Side Diff: src/x64/codegen-x64.cc

Issue 149002: X64: Count operations (increment, decrement) (Closed)
Patch Set: Created 11 years, 6 months ago
1 // Copyright 2009 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
(...skipping 1574 matching lines...)
1585 break;
1586 }
1587
1588 default:
1589 UNREACHABLE();
1590 }
1591 }
1592 }
1593
1594
1595 void CodeGenerator::VisitCountOperation(CountOperation* a) { 1595 // The value in dst was optimistically incremented or decremented. The
1596 UNIMPLEMENTED(); 1596 // result overflowed or was not smi tagged. Undo the operation, call
1597 // into the runtime to convert the argument to a number, and call the
1598 // specialized add or subtract stub. The result is left in dst.
1599 class DeferredPrefixCountOperation: public DeferredCode {
1600 public:
1601 DeferredPrefixCountOperation(Register dst, bool is_increment)
1602 : dst_(dst), is_increment_(is_increment) {
1603 set_comment("[ DeferredCountOperation");
1604 }
1605
1606 virtual void Generate();
1607
1608 private:
1609 Register dst_;
1610 bool is_increment_;
1611 };
1612
1613
1614 void DeferredPrefixCountOperation::Generate() {
1615 // Undo the optimistic smi operation.
1616 if (is_increment_) {
1617 __ subq(dst_, Immediate(Smi::FromInt(1)));
1618 } else {
1619 __ addq(dst_, Immediate(Smi::FromInt(1)));
1620 }
1621 __ push(dst_);
1622 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
1623 __ push(rax);
1624 __ push(Immediate(Smi::FromInt(1)));
1625 if (is_increment_) {
1626 __ CallRuntime(Runtime::kNumberAdd, 2);
1627 } else {
1628 __ CallRuntime(Runtime::kNumberSub, 2);
1629 }
1630 if (!dst_.is(rax)) __ movq(dst_, rax);
1597 1631 }
1598 1632
1633
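Both the optimistic fast path further down and the undo step in the deferred code above manipulate the tagged value directly with Smi::FromInt(1). A minimal sketch of why that works, assuming the smi encoding the port used at the time (an integer n stored as n << 1, so the low tag bit is zero); the helper names are illustrative, not V8's:

#include <cassert>
#include <cstdint>

// Illustrative smi helpers, assuming a value n is tagged as n << 1 with a
// zero tag bit (kSmiTag == 0, kSmiTagMask == 1).
constexpr intptr_t kSmiTagSize = 1;
constexpr intptr_t kSmiTagMask = 1;

intptr_t SmiFromInt(int32_t n) { return static_cast<intptr_t>(n) << kSmiTagSize; }
bool IsSmiTagged(intptr_t v) { return (v & kSmiTagMask) == 0; }

int main() {
  intptr_t x = SmiFromInt(41);
  // Adding the tagged constant 1 to a tagged smi yields the tagged sum
  // directly; no untagging is needed on the fast path.
  intptr_t incremented = x + SmiFromInt(1);
  assert(IsSmiTagged(incremented));
  assert(incremented == SmiFromInt(42));
}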
1634 // The value in dst was optimistically incremented or decremented. The
1635 // result overflowed or was not smi tagged. Undo the operation and call
1636 // into the runtime to convert the argument to a number. Update the
1637 // original value in old. Call the specialized add or subtract stub.
1638 // The result is left in dst.
1639 class DeferredPostfixCountOperation: public DeferredCode {
1640 public:
1641 DeferredPostfixCountOperation(Register dst, Register old, bool is_increment)
1642 : dst_(dst), old_(old), is_increment_(is_increment) {
1643 set_comment("[ DeferredCountOperation");
1644 }
1645
1646 virtual void Generate();
1647
1648 private:
1649 Register dst_;
1650 Register old_;
1651 bool is_increment_;
1652 };
1653
1654
1655 void DeferredPostfixCountOperation::Generate() {
1656 // Undo the optimistic smi operation.
1657 if (is_increment_) {
1658 __ subq(dst_, Immediate(Smi::FromInt(1)));
1659 } else {
1660 __ addq(dst_, Immediate(Smi::FromInt(1)));
1661 }
1662 __ push(dst_);
1663 __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION);
1664
1665 // Save the result of ToNumber to use as the old value.
1666 __ push(rax);
1667
1668 // Call the runtime for the addition or subtraction.
1669 __ push(rax);
1670 __ push(Immediate(Smi::FromInt(1)));
1671 if (is_increment_) {
1672 __ CallRuntime(Runtime::kNumberAdd, 2);
1673 } else {
1674 __ CallRuntime(Runtime::kNumberSub, 2);
1675 }
1676 if (!dst_.is(rax)) __ movq(dst_, rax);
1677 __ pop(old_);
1678 }
1679
1680
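Taken together, the two deferred classes above implement the slow path for ++ and --: convert the operand with TO_NUMBER, then call Runtime::kNumberAdd or kNumberSub with the constant 1. The postfix variant additionally keeps the TO_NUMBER result (the extra push of rax right after the builtin call) so it can be returned as the expression's value. A simplified model of that difference, using plain doubles in place of heap numbers:

#include <cassert>

// Both slow paths store the updated value back into the target; only the
// postfix expression evaluates to the converted old value.
struct CountResult {
  double value;  // the updated value written back into the target
  double old;    // what a postfix expression evaluates to
};

CountResult SlowCount(double to_number_result, bool is_increment) {
  double updated = is_increment ? to_number_result + 1.0
                                : to_number_result - 1.0;
  return {updated, to_number_result};
}

int main() {
  CountResult r = SlowCount(41.0, /*is_increment=*/true);
  assert(r.value == 42.0);  // ++x and x++ both store 42
  assert(r.old == 41.0);    // but x++ evaluates to the ToNumber'd old value
}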
1681 void CodeGenerator::VisitCountOperation(CountOperation* node) {
1682 Comment cmnt(masm_, "[ CountOperation");
1683
1684 bool is_postfix = node->is_postfix();
1685 bool is_increment = node->op() == Token::INC;
1686
1687 Variable* var = node->expression()->AsVariableProxy()->AsVariable();
1688 bool is_const = (var != NULL && var->mode() == Variable::CONST);
1689
1690 // Postfix operations need a stack slot under the reference to hold
1691 // the old value while the new value is being stored. This is so that
1692 // in the case that storing the new value requires a call, the old
1693 // value will be in the frame to be spilled.
1694 if (is_postfix) frame_->Push(Smi::FromInt(0));
1695
1696 { Reference target(this, node->expression());
1697 if (target.is_illegal()) {
1698 // Spoof the virtual frame to have the expected height (one higher
1699 // than on entry).
1700 if (!is_postfix) frame_->Push(Smi::FromInt(0));
1701 return;
1702 }
1703 target.TakeValue(NOT_INSIDE_TYPEOF);
1704
1705 Result new_value = frame_->Pop();
1706 new_value.ToRegister();
1707
1708 Result old_value; // Only allocated in the postfix case.
1709 if (is_postfix) {
1710 // Allocate a temporary to preserve the old value.
1711 old_value = allocator_->Allocate();
1712 ASSERT(old_value.is_valid());
1713 __ movq(old_value.reg(), new_value.reg());
1714 }
1715 // Ensure the new value is writable.
1716 frame_->Spill(new_value.reg());
1717
1718 // In order to combine the overflow and the smi tag check, we need
1719 // to be able to allocate a byte register. We attempt to do so
1720 // without spilling. If we fail, we will generate separate overflow
1721 // and smi tag checks.
1722 //
1723 // We allocate and clear the temporary register before
1724 // performing the count operation since clearing the register using
1725 // xor will clear the overflow flag.
1726 Result tmp = allocator_->AllocateWithoutSpilling();
William Hesse 2009/06/25 10:46:05 The allocation and use of tmp can be replaced with
1727 if (tmp.is_valid()) {
1728 // Clear tmp.reg() to prepare it for setcc after the operation below.
1729 __ xor_(tmp.reg(), tmp.reg());
1730 }
1731
1732 DeferredCode* deferred = NULL;
1733 if (is_postfix) {
1734 deferred = new DeferredPostfixCountOperation(new_value.reg(),
1735 old_value.reg(),
1736 is_increment);
1737 } else {
1738 deferred = new DeferredPrefixCountOperation(new_value.reg(),
1739 is_increment);
1740 }
1741
1742 if (is_increment) {
1743 __ addq(new_value.reg(), Immediate(Smi::FromInt(1)));
1744 } else {
1745 __ subq(new_value.reg(), Immediate(Smi::FromInt(1)));
1746 }
1747
1748 // If the count operation didn't overflow and the result is a valid
1749 // smi, we're done. Otherwise, we jump to the deferred slow-case
1750 // code.
1751 if (tmp.is_valid()) {
1752 // We combine the overflow and the smi tag check if we could
1753 // successfully allocate a temporary byte register.
1754 __ setcc(overflow, tmp.reg());
1755 __ or_(tmp.reg(), new_value.reg());
1756 __ testl(tmp.reg(), Immediate(kSmiTagMask));
1757 tmp.Unuse();
1758 deferred->Branch(not_zero);
1759 } else {
1760 // Otherwise we test separately for overflow and smi tag.
1761 deferred->Branch(overflow);
1762 __ testl(new_value.reg(), Immediate(kSmiTagMask));
1763 deferred->Branch(not_zero);
1764 }
1765 deferred->BindExit();
1766
1767 // Postfix: store the old value in the allocated slot under the
1768 // reference.
1769 if (is_postfix) frame_->SetElementAt(target.size(), &old_value);
1770
1771 frame_->Push(&new_value);
1772 // Non-constant: update the reference.
1773 if (!is_const) target.SetValue(NOT_CONST_INIT);
1774 }
1775
1776 // Postfix: drop the new value and use the old.
1777 if (is_postfix) frame_->Drop();
1778 }
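The placeholder slot pushed at the top of VisitCountOperation is what lets the postfix case survive a store that needs a call: the old value is written into that slot, underneath the reference, so it is still on the virtual frame after the store, and dropping the new value at the end leaves it behind as the expression's result. A rough model of the frame manipulation, using a plain vector in place of V8's VirtualFrame and a one-slot reference (both assumptions for illustration):

#include <cassert>
#include <vector>

int main() {
  std::vector<int> frame;

  frame.push_back(0);    // frame_->Push(Smi::FromInt(0)): placeholder for the old value
  frame.push_back(-1);   // the reference, modelled as a single slot (target.size() == 1)
  frame.push_back(41);   // target.TakeValue(): current value of the target

  int new_value = frame.back();  // Result new_value = frame_->Pop()
  frame.pop_back();
  int old_value = new_value;     // postfix: preserve the old value in a temporary
  new_value += 1;                // optimistic increment

  // frame_->SetElementAt(target.size(), &old_value): write the old value into
  // the placeholder, target.size() elements below the top of the frame.
  frame[frame.size() - 1 - 1] = old_value;

  frame.push_back(new_value);    // frame_->Push(&new_value)
  // target.SetValue(...): the store consumes the value and the reference and
  // leaves the stored value on the frame.
  frame.pop_back();
  frame.pop_back();
  frame.push_back(new_value);

  frame.pop_back();              // postfix: drop the new value...
  assert(frame.size() == 1 && frame.back() == 41);  // ...the old value remains
}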
1779
1780
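The fast-path check in VisitCountOperation folds the overflow test and the smi-tag test into a single branch: setcc writes 1 into the cleared byte register when the optimistic operation overflowed, OR-ing that register with the result sets the low bit if either condition holds, and one testl against kSmiTagMask then decides whether to jump to the deferred code. A small sketch of the predicate being computed, assuming kSmiTagMask is 1 and a smi-tagged value has a clear low bit:

#include <cassert>
#include <cstdint>

constexpr intptr_t kSmiTagMask = 1;  // low bit clear means "smi tagged"

// Models the combined test emitted above: take the deferred slow case if the
// optimistic add/sub overflowed OR the result is not smi tagged.
bool NeedsDeferredSlowCase(bool overflowed, intptr_t result) {
  intptr_t tmp = overflowed ? 1 : 0;   // setcc(overflow, tmp) on a zeroed register
  tmp |= result;                       // or_(tmp, new_value)
  return (tmp & kSmiTagMask) != 0;     // testl(tmp, Immediate(kSmiTagMask)); jnz
}

int main() {
  assert(!NeedsDeferredSlowCase(false, 42 << 1));  // smi result, no overflow: fast path
  assert(NeedsDeferredSlowCase(true, 42 << 1));    // overflow forces the slow path
  assert(NeedsDeferredSlowCase(false, 0x1));       // result not smi tagged: slow path
}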
1599 1781 void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
1600 1782 // TODO(X64): This code was copied verbatim from codegen-ia32.
1601 1783 // Either find a reason to change it or move it to a shared location.
1602 1784
1603 1785 // Note that due to an optimization in comparison operations (typeof
1604 1786 // compared to a string literal), we can evaluate a binary expression such
1605 1787 // as AND or OR and not leave a value on the frame or in the cc register.
1606 1788 Comment cmnt(masm_, "[ BinaryOperation");
1607 1789 Token::Value op = node->op();
1608 1790
(...skipping 2047 matching lines...)
3656 3838 cgen_->frame()->Push(&answer);
3657 3839 break;
3658 3840 }
3659 3841
3660 3842 default:
3661 3843 UNREACHABLE();
3662 3844 }
3663 3845 }
3664 3846
3665 3847
3848 void Reference::TakeValue(TypeofState typeof_state) {
3849 // TODO(X64): This function is completely architecture independent. Move
3850 // it somewhere shared.
3851
3852 // For non-constant frame-allocated slots, we invalidate the value in the
3853 // slot. For all others, we fall back on GetValue.
3854 ASSERT(!cgen_->in_spilled_code());
3855 ASSERT(!is_illegal());
3856 if (type_ != SLOT) {
3857 GetValue(typeof_state);
3858 return;
3859 }
3860
3861 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
3862 ASSERT(slot != NULL);
3863 if (slot->type() == Slot::LOOKUP ||
3864 slot->type() == Slot::CONTEXT ||
3865 slot->var()->mode() == Variable::CONST) {
3866 GetValue(typeof_state);
3867 return;
3868 }
3869
3870 // Only non-constant, frame-allocated parameters and locals can reach
3871 // here.
3872 if (slot->type() == Slot::PARAMETER) {
3873 cgen_->frame()->TakeParameterAt(slot->index());
3874 } else {
3875 ASSERT(slot->type() == Slot::LOCAL);
3876 cgen_->frame()->TakeLocalAt(slot->index());
3877 }
3878 }
3879
3880
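Reference::TakeValue above only departs from GetValue for writable, frame-allocated parameters and locals: rather than copying the slot's value onto the frame, it takes the value so the register holding it can be modified in place, with SetValue writing it back later. A toy illustration of that distinction (not V8's VirtualFrame API):

#include <cassert>
#include <optional>

// Toy stand-in for a frame slot. GetValue copies the value out and leaves the
// slot intact; TakeValue moves it out and leaves the slot empty until a later
// SetValue writes the updated value back.
struct FrameSlot {
  std::optional<int> value;

  int GetValue() const { return *value; }
  int TakeValue() {
    int v = *value;
    value.reset();  // the slot no longer holds a stale copy
    return v;
  }
  void SetValue(int v) { value = v; }
};

int main() {
  FrameSlot local{41};
  int v = local.TakeValue();  // safe to mutate v in place now
  v += 1;
  local.SetValue(v);
  assert(local.value == 42);
}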
3666 3881 void Reference::SetValue(InitState init_state) {
3667 3882 ASSERT(cgen_->HasValidEntryRegisters());
3668 3883 ASSERT(!is_illegal());
3669 3884 MacroAssembler* masm = cgen_->masm();
3670 3885 switch (type_) {
3671 3886 case SLOT: {
3672 3887 Comment cmnt(masm, "[ Store to Slot");
3673 3888 Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
3674 3889 ASSERT(slot != NULL);
3675 3890 cgen_->StoreToSlot(slot, init_state);
(...skipping 1459 matching lines...)
5135 5350 break;
5136 5351 default:
5137 5352 UNREACHABLE();
5138 5353 }
5139 5354 }
5140 5355
5141 5356
5142 5357 #undef __
5143 5358
5144 5359 } } // namespace v8::internal