Chromium Code Reviews

Side by Side Diff: src/arm/code-stubs-arm.cc

Issue 7063017: Rename TypeRecording...Stub into ...Stub. (Closed) Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Created 9 years, 7 months ago
OLD | NEW
1 // Copyright 2011 the V8 project authors. All rights reserved. 1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
(...skipping 1689 matching lines...)
1700 // If length is not zero, "tos_" contains a non-zero value ==> true. 1700 // If length is not zero, "tos_" contains a non-zero value ==> true.
1701 __ Ret(); 1701 __ Ret();
1702 1702
1704 // Return 0 in "tos_" for false. 1704 // Return 0 in "tos_" for false.
1704 __ bind(&false_result); 1704 __ bind(&false_result);
1705 __ mov(tos_, Operand(0, RelocInfo::NONE)); 1705 __ mov(tos_, Operand(0, RelocInfo::NONE));
1706 __ Ret(); 1706 __ Ret();
1707 } 1707 }
1708 1708
1709 1709
1710 Handle<Code> GetTypeRecordingUnaryOpStub(int key, 1710 Handle<Code> GetUnaryOpStub(int key,
Søren Thygesen Gjesse 2011/05/24 11:33:11 Indentation.
fschneider 2011/05/24 12:16:41 Done.
1711 TRUnaryOpIC::TypeInfo type_info) { 1711 UnaryOpIC::TypeInfo type_info) {
1712 TypeRecordingUnaryOpStub stub(key, type_info); 1712 UnaryOpStub stub(key, type_info);
1713 return stub.GetCode(); 1713 return stub.GetCode();
1714 } 1714 }
1715 1715
1716 1716
1717 const char* TypeRecordingUnaryOpStub::GetName() { 1717 const char* UnaryOpStub::GetName() {
1718 if (name_ != NULL) return name_; 1718 if (name_ != NULL) return name_;
1719 const int kMaxNameLength = 100; 1719 const int kMaxNameLength = 100;
1720 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray( 1720 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
1721 kMaxNameLength); 1721 kMaxNameLength);
1722 if (name_ == NULL) return "OOM"; 1722 if (name_ == NULL) return "OOM";
1723 const char* op_name = Token::Name(op_); 1723 const char* op_name = Token::Name(op_);
1724 const char* overwrite_name = NULL; // Make g++ happy. 1724 const char* overwrite_name = NULL; // Make g++ happy.
1725 switch (mode_) { 1725 switch (mode_) {
1726 case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break; 1726 case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break;
1727 case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break; 1727 case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break;
1728 } 1728 }
1729 1729
1730 OS::SNPrintF(Vector<char>(name_, kMaxNameLength), 1730 OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
1731 "TypeRecordingUnaryOpStub_%s_%s_%s", 1731 "UnaryOpStub_%s_%s_%s",
1732 op_name, 1732 op_name,
1733 overwrite_name, 1733 overwrite_name,
1734 TRUnaryOpIC::GetName(operand_type_)); 1734 UnaryOpIC::GetName(operand_type_));
1735 return name_; 1735 return name_;
1736 } 1736 }
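
A quick illustration of the naming scheme built above; the generated name is how the renamed stub appears wherever stub names are printed. This is a standalone C++ sketch, not V8 code: "Alloc" is the overwrite string from the switch above, while "SUB" and "Smi" are assumed placeholder values for what Token::Name(op_) and UnaryOpIC::GetName(operand_type_) would return.

#include <cstdio>

int main() {
  // Pattern: <StubClass>_<op>_<overwrite mode>_<operand type>.
  char name[100];
  std::snprintf(name, sizeof name, "UnaryOpStub_%s_%s_%s", "SUB", "Alloc", "Smi");
  std::puts(name);  // prints: UnaryOpStub_SUB_Alloc_Smi
  return 0;
}
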
1737 1737
1738 1738
1739 // TODO(svenpanne): Use virtual functions instead of switch. 1739 // TODO(svenpanne): Use virtual functions instead of switch.
1740 void TypeRecordingUnaryOpStub::Generate(MacroAssembler* masm) { 1740 void UnaryOpStub::Generate(MacroAssembler* masm) {
1741 switch (operand_type_) { 1741 switch (operand_type_) {
1742 case TRUnaryOpIC::UNINITIALIZED: 1742 case UnaryOpIC::UNINITIALIZED:
1743 GenerateTypeTransition(masm); 1743 GenerateTypeTransition(masm);
1744 break; 1744 break;
1745 case TRUnaryOpIC::SMI: 1745 case UnaryOpIC::SMI:
1746 GenerateSmiStub(masm); 1746 GenerateSmiStub(masm);
1747 break; 1747 break;
1748 case TRUnaryOpIC::HEAP_NUMBER: 1748 case UnaryOpIC::HEAP_NUMBER:
1749 GenerateHeapNumberStub(masm); 1749 GenerateHeapNumberStub(masm);
1750 break; 1750 break;
1751 case TRUnaryOpIC::GENERIC: 1751 case UnaryOpIC::GENERIC:
1752 GenerateGenericStub(masm); 1752 GenerateGenericStub(masm);
1753 break; 1753 break;
1754 } 1754 }
1755 } 1755 }
1756 1756
1757 1757
1758 void TypeRecordingUnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { 1758 void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
1759 // Prepare to push argument. 1759 // Prepare to push argument.
1760 __ mov(r3, Operand(r0)); 1760 __ mov(r3, Operand(r0));
1761 1761
1762 // Push this stub's key. Although the operation and the type info are 1762 // Push this stub's key. Although the operation and the type info are
1763 // encoded into the key, the encoding is opaque, so push them too. 1763 // encoded into the key, the encoding is opaque, so push them too.
1764 __ mov(r2, Operand(Smi::FromInt(MinorKey()))); 1764 __ mov(r2, Operand(Smi::FromInt(MinorKey())));
1765 __ mov(r1, Operand(Smi::FromInt(op_))); 1765 __ mov(r1, Operand(Smi::FromInt(op_)));
1766 __ mov(r0, Operand(Smi::FromInt(operand_type_))); 1766 __ mov(r0, Operand(Smi::FromInt(operand_type_)));
1767 1767
1768 __ Push(r3, r2, r1, r0); 1768 __ Push(r3, r2, r1, r0);
1769 1769
1770 __ TailCallExternalReference( 1770 __ TailCallExternalReference(
1771 ExternalReference(IC_Utility(IC::kTypeRecordingUnaryOp_Patch), 1771 ExternalReference(IC_Utility(IC::kUnaryOp_Patch),
1772 masm->isolate()), 1772 masm->isolate()),
1773 4, 1773 4,
1774 1); 1774 1);
1775 } 1775 }
1776 1776
1777 1777
1778 // TODO(svenpanne): Use virtual functions instead of switch. 1778 // TODO(svenpanne): Use virtual functions instead of switch.
1779 void TypeRecordingUnaryOpStub::GenerateSmiStub(MacroAssembler* masm) { 1779 void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
1780 switch (op_) { 1780 switch (op_) {
1781 case Token::SUB: 1781 case Token::SUB:
1782 GenerateSmiStubSub(masm); 1782 GenerateSmiStubSub(masm);
1783 break; 1783 break;
1784 case Token::BIT_NOT: 1784 case Token::BIT_NOT:
1785 GenerateSmiStubBitNot(masm); 1785 GenerateSmiStubBitNot(masm);
1786 break; 1786 break;
1787 default: 1787 default:
1788 UNREACHABLE(); 1788 UNREACHABLE();
1789 } 1789 }
1790 } 1790 }
1791 1791
1792 1792
1793 void TypeRecordingUnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) { 1793 void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) {
1794 Label non_smi, slow; 1794 Label non_smi, slow;
1795 GenerateSmiCodeSub(masm, &non_smi, &slow); 1795 GenerateSmiCodeSub(masm, &non_smi, &slow);
1796 __ bind(&non_smi); 1796 __ bind(&non_smi);
1797 __ bind(&slow); 1797 __ bind(&slow);
1798 GenerateTypeTransition(masm); 1798 GenerateTypeTransition(masm);
1799 } 1799 }
1800 1800
1801 1801
1802 void TypeRecordingUnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) { 1802 void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) {
1803 Label non_smi; 1803 Label non_smi;
1804 GenerateSmiCodeBitNot(masm, &non_smi); 1804 GenerateSmiCodeBitNot(masm, &non_smi);
1805 __ bind(&non_smi); 1805 __ bind(&non_smi);
1806 GenerateTypeTransition(masm); 1806 GenerateTypeTransition(masm);
1807 } 1807 }
1808 1808
1809 1809
1810 void TypeRecordingUnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm, 1810 void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
Søren Thygesen Gjesse 2011/05/24 11:33:11 Indentation.
fschneider 2011/05/24 12:16:41 Done.
1811 Label* non_smi, 1811 Label* non_smi,
1812 Label* slow) { 1812 Label* slow) {
1813 __ JumpIfNotSmi(r0, non_smi); 1813 __ JumpIfNotSmi(r0, non_smi);
1814 1814
1815 // The result of negating zero or the smallest negative smi is not a smi. 1815 // The result of negating zero or the smallest negative smi is not a smi.
1816 __ bic(ip, r0, Operand(0x80000000), SetCC); 1816 __ bic(ip, r0, Operand(0x80000000), SetCC);
1817 __ b(eq, slow); 1817 __ b(eq, slow);
1818 1818
1819 // Return '0 - value'. 1819 // Return '0 - value'.
1820 __ rsb(r0, r0, Operand(0, RelocInfo::NONE)); 1820 __ rsb(r0, r0, Operand(0, RelocInfo::NONE));
1821 __ Ret(); 1821 __ Ret();
1822 } 1822 }
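
The bic/SetCC check above folds the two failure cases of smi negation into a single test. A minimal standalone sketch (not V8 code), assuming the 32-bit smi encoding value << 1 with a zero tag bit:

#include <cassert>
#include <cstdint>

// Tagged 32-bit smi: value in the upper 31 bits, tag bit 0 in the LSB.
inline uint32_t SmiTag(int32_t value) { return static_cast<uint32_t>(value) << 1; }

// "0 - value" stays a smi unless value is 0 (the JS result -0 needs a heap
// number) or the smallest smi -2^30 (the result 2^30 is out of smi range).
// Their tagged bit patterns are 0x00000000 and 0x80000000, so clearing bit 31
// and testing for zero (the bic/SetCC plus b(eq, slow) above) catches both.
inline bool NegationStaysSmi(uint32_t tagged) {
  return (tagged & 0x7FFFFFFFu) != 0;
}

int main() {
  assert(!NegationStaysSmi(SmiTag(0)));
  assert(!NegationStaysSmi(SmiTag(-(1 << 30))));  // smallest smi
  assert(NegationStaysSmi(SmiTag(42)));
  return 0;
}
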
1823 1823
1824 1824
1825 void TypeRecordingUnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm, 1825 void UnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm,
Søren Thygesen Gjesse 2011/05/24 11:33:11 Indentation.
fschneider 2011/05/24 12:16:41 Done.
1826 Label* non_smi) { 1826 Label* non_smi) {
1827 __ JumpIfNotSmi(r0, non_smi); 1827 __ JumpIfNotSmi(r0, non_smi);
1828 1828
1829 // Flip bits and revert inverted smi-tag. 1829 // Flip bits and revert inverted smi-tag.
1830 __ mvn(r0, Operand(r0)); 1830 __ mvn(r0, Operand(r0));
1831 __ bic(r0, r0, Operand(kSmiTagMask)); 1831 __ bic(r0, r0, Operand(kSmiTagMask));
1832 __ Ret(); 1832 __ Ret();
1833 } 1833 }
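
For BIT_NOT, the comment "flip bits and revert inverted smi-tag" hides a neat property: the fast path can never overflow. A standalone sketch under the same value << 1 smi encoding assumption:

#include <cassert>
#include <cstdint>

inline uint32_t SmiTag(int32_t v) { return static_cast<uint32_t>(v) << 1; }
inline int32_t SmiUntag(uint32_t t) { return static_cast<int32_t>(t) >> 1; }

// mvn + bic from the stub: ~(v << 1) equals ((~v) << 1) | 1, so flipping all
// bits and then clearing the tag bit yields the tagged ~v directly. There is
// no slow case because ~v is in smi range whenever v is.
inline uint32_t SmiBitNot(uint32_t tagged) {
  return ~tagged & ~1u;  // mvn r0, r0 ; bic r0, r0, #kSmiTagMask
}

int main() {
  const int32_t values[] = {0, 1, -1, 42, -(1 << 30), (1 << 30) - 1};
  for (int32_t v : values) {
    assert(SmiUntag(SmiBitNot(SmiTag(v))) == ~v);
  }
  return 0;
}
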
1834 1834
1835 1835
1836 // TODO(svenpanne): Use virtual functions instead of switch. 1836 // TODO(svenpanne): Use virtual functions instead of switch.
1837 void TypeRecordingUnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { 1837 void UnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
1838 switch (op_) { 1838 switch (op_) {
1839 case Token::SUB: 1839 case Token::SUB:
1840 GenerateHeapNumberStubSub(masm); 1840 GenerateHeapNumberStubSub(masm);
1841 break; 1841 break;
1842 case Token::BIT_NOT: 1842 case Token::BIT_NOT:
1843 GenerateHeapNumberStubBitNot(masm); 1843 GenerateHeapNumberStubBitNot(masm);
1844 break; 1844 break;
1845 default: 1845 default:
1846 UNREACHABLE(); 1846 UNREACHABLE();
1847 } 1847 }
1848 } 1848 }
1849 1849
1850 1850
1851 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) { 1851 void UnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) {
1852 Label non_smi, slow, call_builtin; 1852 Label non_smi, slow, call_builtin;
1853 GenerateSmiCodeSub(masm, &non_smi, &call_builtin); 1853 GenerateSmiCodeSub(masm, &non_smi, &call_builtin);
1854 __ bind(&non_smi); 1854 __ bind(&non_smi);
1855 GenerateHeapNumberCodeSub(masm, &slow); 1855 GenerateHeapNumberCodeSub(masm, &slow);
1856 __ bind(&slow); 1856 __ bind(&slow);
1857 GenerateTypeTransition(masm); 1857 GenerateTypeTransition(masm);
1858 __ bind(&call_builtin); 1858 __ bind(&call_builtin);
1859 GenerateGenericCodeFallback(masm); 1859 GenerateGenericCodeFallback(masm);
1860 } 1860 }
1861 1861
1862 1862
1863 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubBitNot( 1863 void UnaryOpStub::GenerateHeapNumberStubBitNot(
Søren Thygesen Gjesse 2011/05/24 11:33:11 Fits on one line?
fschneider 2011/05/24 12:16:41 Done.
1864 MacroAssembler* masm) { 1864 MacroAssembler* masm) {
1865 Label non_smi, slow; 1865 Label non_smi, slow;
1866 GenerateSmiCodeBitNot(masm, &non_smi); 1866 GenerateSmiCodeBitNot(masm, &non_smi);
1867 __ bind(&non_smi); 1867 __ bind(&non_smi);
1868 GenerateHeapNumberCodeBitNot(masm, &slow); 1868 GenerateHeapNumberCodeBitNot(masm, &slow);
1869 __ bind(&slow); 1869 __ bind(&slow);
1870 GenerateTypeTransition(masm); 1870 GenerateTypeTransition(masm);
1871 } 1871 }
1872 1872
1873 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, 1873 void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
Søren Thygesen Gjesse 2011/05/24 11:33:11 Indentation.
fschneider 2011/05/24 12:16:41 Done.
1874 Label* slow) { 1874 Label* slow) {
1875 EmitCheckForHeapNumber(masm, r0, r1, r6, slow); 1875 EmitCheckForHeapNumber(masm, r0, r1, r6, slow);
1876 // r0 is a heap number. Get a new heap number in r1. 1876 // r0 is a heap number. Get a new heap number in r1.
1877 if (mode_ == UNARY_OVERWRITE) { 1877 if (mode_ == UNARY_OVERWRITE) {
1878 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); 1878 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
1879 __ eor(r2, r2, Operand(HeapNumber::kSignMask)); // Flip sign. 1879 __ eor(r2, r2, Operand(HeapNumber::kSignMask)); // Flip sign.
1880 __ str(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); 1880 __ str(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
1881 } else { 1881 } else {
1882 Label slow_allocate_heapnumber, heapnumber_allocated; 1882 Label slow_allocate_heapnumber, heapnumber_allocated;
1883 __ AllocateHeapNumber(r1, r2, r3, r6, &slow_allocate_heapnumber); 1883 __ AllocateHeapNumber(r1, r2, r3, r6, &slow_allocate_heapnumber);
(...skipping 12 matching lines...)
1896 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); 1896 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
1897 __ str(r3, FieldMemOperand(r1, HeapNumber::kMantissaOffset)); 1897 __ str(r3, FieldMemOperand(r1, HeapNumber::kMantissaOffset));
1898 __ eor(r2, r2, Operand(HeapNumber::kSignMask)); // Flip sign. 1898 __ eor(r2, r2, Operand(HeapNumber::kSignMask)); // Flip sign.
1899 __ str(r2, FieldMemOperand(r1, HeapNumber::kExponentOffset)); 1899 __ str(r2, FieldMemOperand(r1, HeapNumber::kExponentOffset));
1900 __ mov(r0, Operand(r1)); 1900 __ mov(r0, Operand(r1));
1901 } 1901 }
1902 __ Ret(); 1902 __ Ret();
1903 } 1903 }
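
The UNARY_OVERWRITE branch above negates the number in place by toggling the sign bit in the word that also holds the exponent, leaving the mantissa untouched. A host-side C++ sketch of the same IEEE 754 bit trick (plain doubles, not V8's HeapNumber layout):

#include <cassert>
#include <cmath>
#include <cstdint>
#include <cstring>

inline double NegateBySignFlip(double x) {
  uint64_t bits;
  std::memcpy(&bits, &x, sizeof bits);
  bits ^= 0x8000000000000000ull;  // counterpart of the eor with kSignMask
  std::memcpy(&x, &bits, sizeof bits);
  return x;
}

int main() {
  assert(NegateBySignFlip(1.5) == -1.5);
  assert(NegateBySignFlip(-0.25) == 0.25);
  assert(std::signbit(NegateBySignFlip(0.0)));  // also turns +0.0 into -0.0
  return 0;
}
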
1904 1904
1905 1905
1906 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeBitNot( 1906 void UnaryOpStub::GenerateHeapNumberCodeBitNot(
1907 MacroAssembler* masm, Label* slow) { 1907 MacroAssembler* masm, Label* slow) {
1908 EmitCheckForHeapNumber(masm, r0, r1, r6, slow); 1908 EmitCheckForHeapNumber(masm, r0, r1, r6, slow);
1909 // Convert the heap number in r0 to an untagged integer in r1. 1909 // Convert the heap number in r0 to an untagged integer in r1.
1910 __ ConvertToInt32(r0, r1, r2, r3, d0, slow); 1910 __ ConvertToInt32(r0, r1, r2, r3, d0, slow);
1911 1911
1912 // Do the bitwise operation and check if the result fits in a smi. 1912 // Do the bitwise operation and check if the result fits in a smi.
1913 Label try_float; 1913 Label try_float;
1914 __ mvn(r1, Operand(r1)); 1914 __ mvn(r1, Operand(r1));
1915 __ add(r2, r1, Operand(0x40000000), SetCC); 1915 __ add(r2, r1, Operand(0x40000000), SetCC);
1916 __ b(mi, &try_float); 1916 __ b(mi, &try_float);
(...skipping 30 matching lines...)
1947 } else { 1947 } else {
1948 // WriteInt32ToHeapNumberStub does not trigger GC, so we do not 1948 // WriteInt32ToHeapNumberStub does not trigger GC, so we do not
1949 // have to set up a frame. 1949 // have to set up a frame.
1950 WriteInt32ToHeapNumberStub stub(r1, r0, r2); 1950 WriteInt32ToHeapNumberStub stub(r1, r0, r2);
1951 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); 1951 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
1952 } 1952 }
1953 } 1953 }
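
The add(r2, r1, Operand(0x40000000), SetCC) / b(mi, ...) pair above (the same idiom reappears in the Int32 binary stub below) tests whether an untagged 32-bit result still fits the 31-bit smi range. A standalone sketch of that predicate:

#include <cassert>
#include <cstdint>

// v fits in a smi iff -2^30 <= v <= 2^30 - 1, which holds exactly when adding
// 2^30 does not reach the sign bit; that is what the add with SetCC followed
// by a branch on "mi" checks.
inline bool FitsSmi(int32_t v) {
  uint32_t shifted = static_cast<uint32_t>(v) + 0x40000000u;
  return (shifted & 0x80000000u) == 0;  // "mi" would be set otherwise
}

int main() {
  assert(FitsSmi(0) && FitsSmi((1 << 30) - 1) && FitsSmi(-(1 << 30)));
  assert(!FitsSmi(1 << 30) && !FitsSmi(-(1 << 30) - 1) && !FitsSmi(INT32_MIN));
  return 0;
}
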
1954 1954
1955 1955
1956 // TODO(svenpanne): Use virtual functions instead of switch. 1956 // TODO(svenpanne): Use virtual functions instead of switch.
1957 void TypeRecordingUnaryOpStub::GenerateGenericStub(MacroAssembler* masm) { 1957 void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) {
1958 switch (op_) { 1958 switch (op_) {
1959 case Token::SUB: 1959 case Token::SUB:
1960 GenerateGenericStubSub(masm); 1960 GenerateGenericStubSub(masm);
1961 break; 1961 break;
1962 case Token::BIT_NOT: 1962 case Token::BIT_NOT:
1963 GenerateGenericStubBitNot(masm); 1963 GenerateGenericStubBitNot(masm);
1964 break; 1964 break;
1965 default: 1965 default:
1966 UNREACHABLE(); 1966 UNREACHABLE();
1967 } 1967 }
1968 } 1968 }
1969 1969
1970 1970
1971 void TypeRecordingUnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) { 1971 void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) {
1972 Label non_smi, slow; 1972 Label non_smi, slow;
1973 GenerateSmiCodeSub(masm, &non_smi, &slow); 1973 GenerateSmiCodeSub(masm, &non_smi, &slow);
1974 __ bind(&non_smi); 1974 __ bind(&non_smi);
1975 GenerateHeapNumberCodeSub(masm, &slow); 1975 GenerateHeapNumberCodeSub(masm, &slow);
1976 __ bind(&slow); 1976 __ bind(&slow);
1977 GenerateGenericCodeFallback(masm); 1977 GenerateGenericCodeFallback(masm);
1978 } 1978 }
1979 1979
1980 1980
1981 void TypeRecordingUnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) { 1981 void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) {
1982 Label non_smi, slow; 1982 Label non_smi, slow;
1983 GenerateSmiCodeBitNot(masm, &non_smi); 1983 GenerateSmiCodeBitNot(masm, &non_smi);
1984 __ bind(&non_smi); 1984 __ bind(&non_smi);
1985 GenerateHeapNumberCodeBitNot(masm, &slow); 1985 GenerateHeapNumberCodeBitNot(masm, &slow);
1986 __ bind(&slow); 1986 __ bind(&slow);
1987 GenerateGenericCodeFallback(masm); 1987 GenerateGenericCodeFallback(masm);
1988 } 1988 }
1989 1989
1990 1990
1991 void TypeRecordingUnaryOpStub::GenerateGenericCodeFallback( 1991 void UnaryOpStub::GenerateGenericCodeFallback(
Søren Thygesen Gjesse 2011/05/24 11:33:11 Fits on one line?
fschneider 2011/05/24 12:16:41 Done.
1992 MacroAssembler* masm) { 1992 MacroAssembler* masm) {
1993 // Handle the slow case by jumping to the JavaScript builtin. 1993 // Handle the slow case by jumping to the JavaScript builtin.
1994 __ push(r0); 1994 __ push(r0);
1995 switch (op_) { 1995 switch (op_) {
1996 case Token::SUB: 1996 case Token::SUB:
1997 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION); 1997 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
1998 break; 1998 break;
1999 case Token::BIT_NOT: 1999 case Token::BIT_NOT:
2000 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION); 2000 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
2001 break; 2001 break;
2002 default: 2002 default:
2003 UNREACHABLE(); 2003 UNREACHABLE();
2004 } 2004 }
2005 } 2005 }
2006 2006
2007 2007
2008 Handle<Code> GetTypeRecordingBinaryOpStub(int key, 2008 Handle<Code> GetBinaryOpStub(int key,
Søren Thygesen Gjesse 2011/05/24 11:33:11 Full indentation (not 4 spaces) or int key on a separate line.
fschneider 2011/05/24 12:16:41 Done.
2009 TRBinaryOpIC::TypeInfo type_info, 2009 BinaryOpIC::TypeInfo type_info,
2010 TRBinaryOpIC::TypeInfo result_type_info) { 2010 BinaryOpIC::TypeInfo result_type_info) {
2011 TypeRecordingBinaryOpStub stub(key, type_info, result_type_info); 2011 BinaryOpStub stub(key, type_info, result_type_info);
2012 return stub.GetCode(); 2012 return stub.GetCode();
2013 } 2013 }
2014 2014
2015 2015
2016 void TypeRecordingBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { 2016 void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
2017 Label get_result; 2017 Label get_result;
2018 2018
2019 __ Push(r1, r0); 2019 __ Push(r1, r0);
2020 2020
2021 __ mov(r2, Operand(Smi::FromInt(MinorKey()))); 2021 __ mov(r2, Operand(Smi::FromInt(MinorKey())));
2022 __ mov(r1, Operand(Smi::FromInt(op_))); 2022 __ mov(r1, Operand(Smi::FromInt(op_)));
2023 __ mov(r0, Operand(Smi::FromInt(operands_type_))); 2023 __ mov(r0, Operand(Smi::FromInt(operands_type_)));
2024 __ Push(r2, r1, r0); 2024 __ Push(r2, r1, r0);
2025 2025
2026 __ TailCallExternalReference( 2026 __ TailCallExternalReference(
2027 ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch), 2027 ExternalReference(IC_Utility(IC::kBinaryOp_Patch),
2028 masm->isolate()), 2028 masm->isolate()),
2029 5, 2029 5,
2030 1); 2030 1);
2031 } 2031 }
2032 2032
2033 2033
2034 void TypeRecordingBinaryOpStub::GenerateTypeTransitionWithSavedArgs( 2034 void BinaryOpStub::GenerateTypeTransitionWithSavedArgs(
2035 MacroAssembler* masm) { 2035 MacroAssembler* masm) {
2036 UNIMPLEMENTED(); 2036 UNIMPLEMENTED();
2037 } 2037 }
2038 2038
2039 2039
2040 void TypeRecordingBinaryOpStub::Generate(MacroAssembler* masm) { 2040 void BinaryOpStub::Generate(MacroAssembler* masm) {
2041 switch (operands_type_) { 2041 switch (operands_type_) {
2042 case TRBinaryOpIC::UNINITIALIZED: 2042 case BinaryOpIC::UNINITIALIZED:
2043 GenerateTypeTransition(masm); 2043 GenerateTypeTransition(masm);
2044 break; 2044 break;
2045 case TRBinaryOpIC::SMI: 2045 case BinaryOpIC::SMI:
2046 GenerateSmiStub(masm); 2046 GenerateSmiStub(masm);
2047 break; 2047 break;
2048 case TRBinaryOpIC::INT32: 2048 case BinaryOpIC::INT32:
2049 GenerateInt32Stub(masm); 2049 GenerateInt32Stub(masm);
2050 break; 2050 break;
2051 case TRBinaryOpIC::HEAP_NUMBER: 2051 case BinaryOpIC::HEAP_NUMBER:
2052 GenerateHeapNumberStub(masm); 2052 GenerateHeapNumberStub(masm);
2053 break; 2053 break;
2054 case TRBinaryOpIC::ODDBALL: 2054 case BinaryOpIC::ODDBALL:
2055 GenerateOddballStub(masm); 2055 GenerateOddballStub(masm);
2056 break; 2056 break;
2057 case TRBinaryOpIC::BOTH_STRING: 2057 case BinaryOpIC::BOTH_STRING:
2058 GenerateBothStringStub(masm); 2058 GenerateBothStringStub(masm);
2059 break; 2059 break;
2060 case TRBinaryOpIC::STRING: 2060 case BinaryOpIC::STRING:
2061 GenerateStringStub(masm); 2061 GenerateStringStub(masm);
2062 break; 2062 break;
2063 case TRBinaryOpIC::GENERIC: 2063 case BinaryOpIC::GENERIC:
2064 GenerateGeneric(masm); 2064 GenerateGeneric(masm);
2065 break; 2065 break;
2066 default: 2066 default:
2067 UNREACHABLE(); 2067 UNREACHABLE();
2068 } 2068 }
2069 } 2069 }
2070 2070
2071 2071
2072 const char* TypeRecordingBinaryOpStub::GetName() { 2072 const char* BinaryOpStub::GetName() {
2073 if (name_ != NULL) return name_; 2073 if (name_ != NULL) return name_;
2074 const int kMaxNameLength = 100; 2074 const int kMaxNameLength = 100;
2075 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray( 2075 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
2076 kMaxNameLength); 2076 kMaxNameLength);
2077 if (name_ == NULL) return "OOM"; 2077 if (name_ == NULL) return "OOM";
2078 const char* op_name = Token::Name(op_); 2078 const char* op_name = Token::Name(op_);
2079 const char* overwrite_name; 2079 const char* overwrite_name;
2080 switch (mode_) { 2080 switch (mode_) {
2081 case NO_OVERWRITE: overwrite_name = "Alloc"; break; 2081 case NO_OVERWRITE: overwrite_name = "Alloc"; break;
2082 case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break; 2082 case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
2083 case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break; 2083 case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
2084 default: overwrite_name = "UnknownOverwrite"; break; 2084 default: overwrite_name = "UnknownOverwrite"; break;
2085 } 2085 }
2086 2086
2087 OS::SNPrintF(Vector<char>(name_, kMaxNameLength), 2087 OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
2088 "TypeRecordingBinaryOpStub_%s_%s_%s", 2088 "BinaryOpStub_%s_%s_%s",
2089 op_name, 2089 op_name,
2090 overwrite_name, 2090 overwrite_name,
2091 TRBinaryOpIC::GetName(operands_type_)); 2091 BinaryOpIC::GetName(operands_type_));
2092 return name_; 2092 return name_;
2093 } 2093 }
2094 2094
2095 2095
2096 void TypeRecordingBinaryOpStub::GenerateSmiSmiOperation( 2096 void BinaryOpStub::GenerateSmiSmiOperation(
Søren Thygesen Gjesse 2011/05/24 11:33:11 Fits one line?
fschneider 2011/05/24 12:16:41 Done.
2097 MacroAssembler* masm) { 2097 MacroAssembler* masm) {
2098 Register left = r1; 2098 Register left = r1;
2099 Register right = r0; 2099 Register right = r0;
2100 Register scratch1 = r7; 2100 Register scratch1 = r7;
2101 Register scratch2 = r9; 2101 Register scratch2 = r9;
2102 2102
2103 ASSERT(right.is(r0)); 2103 ASSERT(right.is(r0));
2104 STATIC_ASSERT(kSmiTag == 0); 2104 STATIC_ASSERT(kSmiTag == 0);
2105 2105
2106 Label not_smi_result; 2106 Label not_smi_result;
(...skipping 105 matching lines...)
2212 __ SmiTag(right, scratch1); 2212 __ SmiTag(right, scratch1);
2213 __ Ret(); 2213 __ Ret();
2214 break; 2214 break;
2215 default: 2215 default:
2216 UNREACHABLE(); 2216 UNREACHABLE();
2217 } 2217 }
2218 __ bind(&not_smi_result); 2218 __ bind(&not_smi_result);
2219 } 2219 }
2220 2220
2221 2221
2222 void TypeRecordingBinaryOpStub::GenerateFPOperation(MacroAssembler* masm, 2222 void BinaryOpStub::GenerateFPOperation(MacroAssembler* masm,
Søren Thygesen Gjesse 2011/05/24 11:33:11 Indentation.
fschneider 2011/05/24 12:16:41 Done.
2223 bool smi_operands, 2223 bool smi_operands,
2224 Label* not_numbers, 2224 Label* not_numbers,
2225 Label* gc_required) { 2225 Label* gc_required) {
2226 Register left = r1; 2226 Register left = r1;
2227 Register right = r0; 2227 Register right = r0;
2228 Register scratch1 = r7; 2228 Register scratch1 = r7;
2229 Register scratch2 = r9; 2229 Register scratch2 = r9;
2230 Register scratch3 = r4; 2230 Register scratch3 = r4;
2231 2231
2232 ASSERT(smi_operands || (not_numbers != NULL)); 2232 ASSERT(smi_operands || (not_numbers != NULL));
(...skipping 193 matching lines...)
2426 default: 2426 default:
2427 UNREACHABLE(); 2427 UNREACHABLE();
2428 } 2428 }
2429 } 2429 }
2430 2430
2431 2431
2432 // Generate the smi code. If the operation on smis is successful this return is 2432 // Generate the smi code. If the operation on smis is successful this return is
2433 // generated. If the result is not a smi and heap number allocation is not 2433 // generated. If the result is not a smi and heap number allocation is not
2434 // requested the code falls through. If number allocation is requested but a 2434 // requested the code falls through. If number allocation is requested but a
2435 // heap number cannot be allocated the code jumps to the label gc_required. 2435 // heap number cannot be allocated the code jumps to the label gc_required.
2436 void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, 2436 void BinaryOpStub::GenerateSmiCode(MacroAssembler* masm,
Søren Thygesen Gjesse 2011/05/24 11:33:11 MacroAssembler* masm, should go on a separate line
fschneider 2011/05/24 12:16:41 Done.
2437 Label* use_runtime, 2437 Label* use_runtime,
2438 Label* gc_required, 2438 Label* gc_required,
2439 SmiCodeGenerateHeapNumberResults allow_heapnumber_results) { 2439 SmiCodeGenerateHeapNumberResults allow_heapnumber_results) {
2440 Label not_smis; 2440 Label not_smis;
2441 2441
2442 Register left = r1; 2442 Register left = r1;
2443 Register right = r0; 2443 Register right = r0;
2444 Register scratch1 = r7; 2444 Register scratch1 = r7;
2445 Register scratch2 = r9; 2445 Register scratch2 = r9;
2446 2446
2447 // Perform combined smi check on both operands. 2447 // Perform combined smi check on both operands.
2448 __ orr(scratch1, left, Operand(right)); 2448 __ orr(scratch1, left, Operand(right));
2449 STATIC_ASSERT(kSmiTag == 0); 2449 STATIC_ASSERT(kSmiTag == 0);
2450 __ tst(scratch1, Operand(kSmiTagMask)); 2450 __ tst(scratch1, Operand(kSmiTagMask));
2451 __ b(ne, &not_smis); 2451 __ b(ne, &not_smis);
2452 2452
2453 // If the smi-smi operation results in a smi return is generated. 2453 // If the smi-smi operation results in a smi return is generated.
2454 GenerateSmiSmiOperation(masm); 2454 GenerateSmiSmiOperation(masm);
2455 2455
2456 // If heap number results are possible generate the result in an allocated 2456 // If heap number results are possible generate the result in an allocated
2457 // heap number. 2457 // heap number.
2458 if (allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS) { 2458 if (allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS) {
2459 GenerateFPOperation(masm, true, use_runtime, gc_required); 2459 GenerateFPOperation(masm, true, use_runtime, gc_required);
2460 } 2460 }
2461 __ bind(&not_smis); 2461 __ bind(&not_smis);
2462 } 2462 }
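
The "combined smi check" at the top of GenerateSmiCode tests both operands with one orr/tst: the smi tag lives in the low bit and is 0, so the OR of two tagged words keeps that bit clear only when both are smis. A small sketch, assuming V8's 32-bit tagging (smi tag 0, heap object tag 1):

#include <cassert>
#include <cstdint>

constexpr uint32_t kSmiTagMask = 1;

inline bool BothSmis(uint32_t left, uint32_t right) {
  return ((left | right) & kSmiTagMask) == 0;  // orr scratch1, left, right ; tst
}

int main() {
  const uint32_t smi_four = 4u << 1;     // tagged smi 4
  const uint32_t heap_object = 0x1001u;  // hypothetical tagged heap pointer
  assert(BothSmis(smi_four, smi_four));
  assert(!BothSmis(smi_four, heap_object));
  assert(!BothSmis(heap_object, heap_object));
  return 0;
}
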
2463 2463
2464 2464
2465 void TypeRecordingBinaryOpStub::GenerateSmiStub(MacroAssembler* masm) { 2465 void BinaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
2466 Label not_smis, call_runtime; 2466 Label not_smis, call_runtime;
2467 2467
2468 if (result_type_ == TRBinaryOpIC::UNINITIALIZED || 2468 if (result_type_ == BinaryOpIC::UNINITIALIZED ||
2469 result_type_ == TRBinaryOpIC::SMI) { 2469 result_type_ == BinaryOpIC::SMI) {
2470 // Only allow smi results. 2470 // Only allow smi results.
2471 GenerateSmiCode(masm, &call_runtime, NULL, NO_HEAPNUMBER_RESULTS); 2471 GenerateSmiCode(masm, &call_runtime, NULL, NO_HEAPNUMBER_RESULTS);
2472 } else { 2472 } else {
2473 // Allow heap number result and don't make a transition if a heap number 2473 // Allow heap number result and don't make a transition if a heap number
2474 // cannot be allocated. 2474 // cannot be allocated.
2475 GenerateSmiCode(masm, 2475 GenerateSmiCode(masm,
2476 &call_runtime, 2476 &call_runtime,
2477 &call_runtime, 2477 &call_runtime,
2478 ALLOW_HEAPNUMBER_RESULTS); 2478 ALLOW_HEAPNUMBER_RESULTS);
2479 } 2479 }
2480 2480
2481 // Code falls through if the result is not returned as either a smi or heap 2481 // Code falls through if the result is not returned as either a smi or heap
2482 // number. 2482 // number.
2483 GenerateTypeTransition(masm); 2483 GenerateTypeTransition(masm);
2484 2484
2485 __ bind(&call_runtime); 2485 __ bind(&call_runtime);
2486 GenerateCallRuntime(masm); 2486 GenerateCallRuntime(masm);
2487 } 2487 }
2488 2488
2489 2489
2490 void TypeRecordingBinaryOpStub::GenerateStringStub(MacroAssembler* masm) { 2490 void BinaryOpStub::GenerateStringStub(MacroAssembler* masm) {
2491 ASSERT(operands_type_ == TRBinaryOpIC::STRING); 2491 ASSERT(operands_type_ == BinaryOpIC::STRING);
2492 ASSERT(op_ == Token::ADD); 2492 ASSERT(op_ == Token::ADD);
2493 // Try to add arguments as strings, otherwise, transition to the generic 2493 // Try to add arguments as strings, otherwise, transition to the generic
2494 // TRBinaryOpIC type. 2494 // BinaryOpIC type.
2495 GenerateAddStrings(masm); 2495 GenerateAddStrings(masm);
2496 GenerateTypeTransition(masm); 2496 GenerateTypeTransition(masm);
2497 } 2497 }
2498 2498
2499 2499
2500 void TypeRecordingBinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) { 2500 void BinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) {
2501 Label call_runtime; 2501 Label call_runtime;
2502 ASSERT(operands_type_ == TRBinaryOpIC::BOTH_STRING); 2502 ASSERT(operands_type_ == BinaryOpIC::BOTH_STRING);
2503 ASSERT(op_ == Token::ADD); 2503 ASSERT(op_ == Token::ADD);
2504 // If both arguments are strings, call the string add stub. 2504 // If both arguments are strings, call the string add stub.
2505 // Otherwise, do a transition. 2505 // Otherwise, do a transition.
2506 2506
2507 // Registers containing left and right operands respectively. 2507 // Registers containing left and right operands respectively.
2508 Register left = r1; 2508 Register left = r1;
2509 Register right = r0; 2509 Register right = r0;
2510 2510
2511 // Test if left operand is a string. 2511 // Test if left operand is a string.
2512 __ JumpIfSmi(left, &call_runtime); 2512 __ JumpIfSmi(left, &call_runtime);
2513 __ CompareObjectType(left, r2, r2, FIRST_NONSTRING_TYPE); 2513 __ CompareObjectType(left, r2, r2, FIRST_NONSTRING_TYPE);
2514 __ b(ge, &call_runtime); 2514 __ b(ge, &call_runtime);
2515 2515
2516 // Test if right operand is a string. 2516 // Test if right operand is a string.
2517 __ JumpIfSmi(right, &call_runtime); 2517 __ JumpIfSmi(right, &call_runtime);
2518 __ CompareObjectType(right, r2, r2, FIRST_NONSTRING_TYPE); 2518 __ CompareObjectType(right, r2, r2, FIRST_NONSTRING_TYPE);
2519 __ b(ge, &call_runtime); 2519 __ b(ge, &call_runtime);
2520 2520
2521 StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB); 2521 StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB);
2522 GenerateRegisterArgsPush(masm); 2522 GenerateRegisterArgsPush(masm);
2523 __ TailCallStub(&string_add_stub); 2523 __ TailCallStub(&string_add_stub);
2524 2524
2525 __ bind(&call_runtime); 2525 __ bind(&call_runtime);
2526 GenerateTypeTransition(masm); 2526 GenerateTypeTransition(masm);
2527 } 2527 }
2528 2528
2529 2529
2530 void TypeRecordingBinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) { 2530 void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) {
2531 ASSERT(operands_type_ == TRBinaryOpIC::INT32); 2531 ASSERT(operands_type_ == BinaryOpIC::INT32);
2532 2532
2533 Register left = r1; 2533 Register left = r1;
2534 Register right = r0; 2534 Register right = r0;
2535 Register scratch1 = r7; 2535 Register scratch1 = r7;
2536 Register scratch2 = r9; 2536 Register scratch2 = r9;
2537 DwVfpRegister double_scratch = d0; 2537 DwVfpRegister double_scratch = d0;
2538 SwVfpRegister single_scratch = s3; 2538 SwVfpRegister single_scratch = s3;
2539 2539
2540 Register heap_number_result = no_reg; 2540 Register heap_number_result = no_reg;
2541 Register heap_number_map = r6; 2541 Register heap_number_map = r6;
(...skipping 76 matching lines...)
2618 // Try to return a smi if we can. 2618 // Try to return a smi if we can.
2619 // Otherwise return a heap number if allowed, or jump to type 2619 // Otherwise return a heap number if allowed, or jump to type
2620 // transition. 2620 // transition.
2621 2621
2622 __ EmitVFPTruncate(kRoundToZero, 2622 __ EmitVFPTruncate(kRoundToZero,
2623 single_scratch, 2623 single_scratch,
2624 d5, 2624 d5,
2625 scratch1, 2625 scratch1,
2626 scratch2); 2626 scratch2);
2627 2627
2628 if (result_type_ <= TRBinaryOpIC::INT32) { 2628 if (result_type_ <= BinaryOpIC::INT32) {
2629 // If the ne condition is set, result does 2629 // If the ne condition is set, result does
2630 // not fit in a 32-bit integer. 2630 // not fit in a 32-bit integer.
2631 __ b(ne, &transition); 2631 __ b(ne, &transition);
2632 } 2632 }
2633 2633
2634 // Check if the result fits in a smi. 2634 // Check if the result fits in a smi.
2635 __ vmov(scratch1, single_scratch); 2635 __ vmov(scratch1, single_scratch);
2636 __ add(scratch2, scratch1, Operand(0x40000000), SetCC); 2636 __ add(scratch2, scratch1, Operand(0x40000000), SetCC);
2637 // If not try to return a heap number. 2637 // If not try to return a heap number.
2638 __ b(mi, &return_heap_number); 2638 __ b(mi, &return_heap_number);
2639 // Check for minus zero. Return heap number for minus zero. 2639 // Check for minus zero. Return heap number for minus zero.
2640 Label not_zero; 2640 Label not_zero;
2641 __ cmp(scratch1, Operand(0)); 2641 __ cmp(scratch1, Operand(0));
2642 __ b(ne, &not_zero); 2642 __ b(ne, &not_zero);
2643 __ vmov(scratch2, d5.high()); 2643 __ vmov(scratch2, d5.high());
2644 __ tst(scratch2, Operand(HeapNumber::kSignMask)); 2644 __ tst(scratch2, Operand(HeapNumber::kSignMask));
2645 __ b(ne, &return_heap_number); 2645 __ b(ne, &return_heap_number);
2646 __ bind(&not_zero); 2646 __ bind(&not_zero);
2647 2647
2648 // Tag the result and return. 2648 // Tag the result and return.
2649 __ SmiTag(r0, scratch1); 2649 __ SmiTag(r0, scratch1);
2650 __ Ret(); 2650 __ Ret();
2651 } else { 2651 } else {
2652 // DIV just falls through to allocating a heap number. 2652 // DIV just falls through to allocating a heap number.
2653 } 2653 }
2654 2654
2655 if (result_type_ >= (op_ == Token::DIV) ? TRBinaryOpIC::HEAP_NUMBER 2655 if (result_type_ >= (op_ == Token::DIV) ? BinaryOpIC::HEAP_NUMBER
2656 : TRBinaryOpIC::INT32) { 2656 : BinaryOpIC::INT32) {
2657 __ bind(&return_heap_number); 2657 __ bind(&return_heap_number);
2658 // We are using vfp registers so r5 is available. 2658 // We are using vfp registers so r5 is available.
2659 heap_number_result = r5; 2659 heap_number_result = r5;
2660 GenerateHeapResultAllocation(masm, 2660 GenerateHeapResultAllocation(masm,
2661 heap_number_result, 2661 heap_number_result,
2662 heap_number_map, 2662 heap_number_map,
2663 scratch1, 2663 scratch1,
2664 scratch2, 2664 scratch2,
2665 &call_runtime); 2665 &call_runtime);
2666 __ sub(r0, heap_number_result, Operand(kHeapObjectTag)); 2666 __ sub(r0, heap_number_result, Operand(kHeapObjectTag));
(...skipping 89 matching lines...)
2756 __ and_(r2, r2, Operand(0x1f)); 2756 __ and_(r2, r2, Operand(0x1f));
2757 __ mov(r2, Operand(r3, LSR, r2), SetCC); 2757 __ mov(r2, Operand(r3, LSR, r2), SetCC);
2758 // SHR is special because it is required to produce a positive answer. 2758 // SHR is special because it is required to produce a positive answer.
2759 // We only get a negative result if the shift value (r2) is 0. 2759 // We only get a negative result if the shift value (r2) is 0.
2760 // This result cannot be represented as a signed 32-bit integer, try 2760 // This result cannot be represented as a signed 32-bit integer, try
2761 // to return a heap number if we can. 2761 // to return a heap number if we can.
2762 // The non vfp3 code does not support this special case, so jump to 2762 // The non vfp3 code does not support this special case, so jump to
2763 // runtime if we don't support it. 2763 // runtime if we don't support it.
2764 if (CpuFeatures::IsSupported(VFP3)) { 2764 if (CpuFeatures::IsSupported(VFP3)) {
2765 __ b(mi, 2765 __ b(mi,
2766 (result_type_ <= TRBinaryOpIC::INT32) ? &transition 2766 (result_type_ <= BinaryOpIC::INT32) ? &transition
Søren Thygesen Gjesse 2011/05/24 11:33:11 Indentation.
fschneider 2011/05/24 12:16:41 Done.
2767 : &return_heap_number); 2767 : &return_heap_number);
2768 } else { 2768 } else {
2769 __ b(mi, (result_type_ <= TRBinaryOpIC::INT32) ? &transition 2769 __ b(mi, (result_type_ <= BinaryOpIC::INT32) ? &transition
Søren Thygesen Gjesse 2011/05/24 11:33:11 Indentation.
fschneider 2011/05/24 12:16:41 Done.
2770 : &call_runtime); 2770 : &call_runtime);
2771 } 2771 }
2772 break; 2772 break;
2773 case Token::SHL: 2773 case Token::SHL:
2774 __ and_(r2, r2, Operand(0x1f)); 2774 __ and_(r2, r2, Operand(0x1f));
2775 __ mov(r2, Operand(r3, LSL, r2)); 2775 __ mov(r2, Operand(r3, LSL, r2));
2776 break; 2776 break;
2777 default: 2777 default:
2778 UNREACHABLE(); 2778 UNREACHABLE();
2779 } 2779 }
(...skipping 50 matching lines...)
2830 if (transition.is_linked()) { 2830 if (transition.is_linked()) {
2831 __ bind(&transition); 2831 __ bind(&transition);
2832 GenerateTypeTransition(masm); 2832 GenerateTypeTransition(masm);
2833 } 2833 }
2834 2834
2835 __ bind(&call_runtime); 2835 __ bind(&call_runtime);
2836 GenerateCallRuntime(masm); 2836 GenerateCallRuntime(masm);
2837 } 2837 }
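
The SHR case in the shift handling above is the only bitwise operation whose result can leave the signed 32-bit range: JavaScript's >>> is unsigned, so an operand with the top bit set and a shift count of 0 produces a value in [2^31, 2^32) that fits neither a smi nor an int32, which is why the stub needs the heap-number (VFP3) or runtime fallback. A one-assert illustration of the range problem:

#include <cassert>
#include <cstdint>

int main() {
  uint32_t operand = 0xFFFFFFFFu;   // JS: -1 viewed as an unsigned 32-bit value
  uint32_t shifted = operand >> 0;  // (-1) >>> 0 === 4294967295 in JavaScript
  assert(shifted > static_cast<uint32_t>(INT32_MAX));  // no int32/smi encoding
  return 0;
}
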
2838 2838
2839 2839
2840 void TypeRecordingBinaryOpStub::GenerateOddballStub(MacroAssembler* masm) { 2840 void BinaryOpStub::GenerateOddballStub(MacroAssembler* masm) {
2841 Label call_runtime; 2841 Label call_runtime;
2842 2842
2843 if (op_ == Token::ADD) { 2843 if (op_ == Token::ADD) {
2844 // Handle string addition here, because it is the only operation 2844 // Handle string addition here, because it is the only operation
2845 // that does not do a ToNumber conversion on the operands. 2845 // that does not do a ToNumber conversion on the operands.
2846 GenerateAddStrings(masm); 2846 GenerateAddStrings(masm);
2847 } 2847 }
2848 2848
2849 // Convert oddball arguments to numbers. 2849 // Convert oddball arguments to numbers.
2850 Label check, done; 2850 Label check, done;
(...skipping 12 matching lines...)
2863 __ mov(r0, Operand(Smi::FromInt(0))); 2863 __ mov(r0, Operand(Smi::FromInt(0)));
2864 } else { 2864 } else {
2865 __ LoadRoot(r0, Heap::kNanValueRootIndex); 2865 __ LoadRoot(r0, Heap::kNanValueRootIndex);
2866 } 2866 }
2867 __ bind(&done); 2867 __ bind(&done);
2868 2868
2869 GenerateHeapNumberStub(masm); 2869 GenerateHeapNumberStub(masm);
2870 } 2870 }
2871 2871
2872 2872
2873 void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { 2873 void BinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
2874 Label call_runtime; 2874 Label call_runtime;
2875 GenerateFPOperation(masm, false, &call_runtime, &call_runtime); 2875 GenerateFPOperation(masm, false, &call_runtime, &call_runtime);
2876 2876
2877 __ bind(&call_runtime); 2877 __ bind(&call_runtime);
2878 GenerateCallRuntime(masm); 2878 GenerateCallRuntime(masm);
2879 } 2879 }
2880 2880
2881 2881
2882 void TypeRecordingBinaryOpStub::GenerateGeneric(MacroAssembler* masm) { 2882 void BinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
2883 Label call_runtime, call_string_add_or_runtime; 2883 Label call_runtime, call_string_add_or_runtime;
2884 2884
2885 GenerateSmiCode(masm, &call_runtime, &call_runtime, ALLOW_HEAPNUMBER_RESULTS); 2885 GenerateSmiCode(masm, &call_runtime, &call_runtime, ALLOW_HEAPNUMBER_RESULTS);
2886 2886
2887 GenerateFPOperation(masm, false, &call_string_add_or_runtime, &call_runtime); 2887 GenerateFPOperation(masm, false, &call_string_add_or_runtime, &call_runtime);
2888 2888
2889 __ bind(&call_string_add_or_runtime); 2889 __ bind(&call_string_add_or_runtime);
2890 if (op_ == Token::ADD) { 2890 if (op_ == Token::ADD) {
2891 GenerateAddStrings(masm); 2891 GenerateAddStrings(masm);
2892 } 2892 }
2893 2893
2894 __ bind(&call_runtime); 2894 __ bind(&call_runtime);
2895 GenerateCallRuntime(masm); 2895 GenerateCallRuntime(masm);
2896 } 2896 }
2897 2897
2898 2898
2899 void TypeRecordingBinaryOpStub::GenerateAddStrings(MacroAssembler* masm) { 2899 void BinaryOpStub::GenerateAddStrings(MacroAssembler* masm) {
2900 ASSERT(op_ == Token::ADD); 2900 ASSERT(op_ == Token::ADD);
2901 Label left_not_string, call_runtime; 2901 Label left_not_string, call_runtime;
2902 2902
2903 Register left = r1; 2903 Register left = r1;
2904 Register right = r0; 2904 Register right = r0;
2905 2905
2906 // Check if left argument is a string. 2906 // Check if left argument is a string.
2907 __ JumpIfSmi(left, &left_not_string); 2907 __ JumpIfSmi(left, &left_not_string);
2908 __ CompareObjectType(left, r2, r2, FIRST_NONSTRING_TYPE); 2908 __ CompareObjectType(left, r2, r2, FIRST_NONSTRING_TYPE);
2909 __ b(ge, &left_not_string); 2909 __ b(ge, &left_not_string);
(...skipping 10 matching lines...)
2920 2920
2921 StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB); 2921 StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB);
2922 GenerateRegisterArgsPush(masm); 2922 GenerateRegisterArgsPush(masm);
2923 __ TailCallStub(&string_add_right_stub); 2923 __ TailCallStub(&string_add_right_stub);
2924 2924
2925 // At least one argument is not a string. 2925 // At least one argument is not a string.
2926 __ bind(&call_runtime); 2926 __ bind(&call_runtime);
2927 } 2927 }
2928 2928
2929 2929
2930 void TypeRecordingBinaryOpStub::GenerateCallRuntime(MacroAssembler* masm) { 2930 void BinaryOpStub::GenerateCallRuntime(MacroAssembler* masm) {
2931 GenerateRegisterArgsPush(masm); 2931 GenerateRegisterArgsPush(masm);
2932 switch (op_) { 2932 switch (op_) {
2933 case Token::ADD: 2933 case Token::ADD:
2934 __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION); 2934 __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION);
2935 break; 2935 break;
2936 case Token::SUB: 2936 case Token::SUB:
2937 __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION); 2937 __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION);
2938 break; 2938 break;
2939 case Token::MUL: 2939 case Token::MUL:
2940 __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION); 2940 __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION);
(...skipping 21 matching lines...)
2962 break; 2962 break;
2963 case Token::SHL: 2963 case Token::SHL:
2964 __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION); 2964 __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION);
2965 break; 2965 break;
2966 default: 2966 default:
2967 UNREACHABLE(); 2967 UNREACHABLE();
2968 } 2968 }
2969 } 2969 }
2970 2970
2971 2971
2972 void TypeRecordingBinaryOpStub::GenerateHeapResultAllocation( 2972 void BinaryOpStub::GenerateHeapResultAllocation(
Søren Thygesen Gjesse 2011/05/24 11:33:11 Fits full indentation.
fschneider 2011/05/24 12:16:41 Done.
2973 MacroAssembler* masm, 2973 MacroAssembler* masm,
2974 Register result, 2974 Register result,
2975 Register heap_number_map, 2975 Register heap_number_map,
2976 Register scratch1, 2976 Register scratch1,
2977 Register scratch2, 2977 Register scratch2,
2978 Label* gc_required) { 2978 Label* gc_required) {
2979 2979
2980 // Code below will scratch result if allocation fails. To keep both arguments 2980 // Code below will scratch result if allocation fails. To keep both arguments
2981 // intact for the runtime call result cannot be one of these. 2981 // intact for the runtime call result cannot be one of these.
2982 ASSERT(!result.is(r0) && !result.is(r1)); 2982 ASSERT(!result.is(r0) && !result.is(r1));
(...skipping 13 matching lines...) Expand all
2996 __ mov(result, Operand(overwritable_operand)); 2996 __ mov(result, Operand(overwritable_operand));
2997 __ bind(&allocated); 2997 __ bind(&allocated);
2998 } else { 2998 } else {
2999 ASSERT(mode_ == NO_OVERWRITE); 2999 ASSERT(mode_ == NO_OVERWRITE);
3000 __ AllocateHeapNumber( 3000 __ AllocateHeapNumber(
3001 result, scratch1, scratch2, heap_number_map, gc_required); 3001 result, scratch1, scratch2, heap_number_map, gc_required);
3002 } 3002 }
3003 } 3003 }
3004 3004
3005 3005
3006 void TypeRecordingBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) { 3006 void BinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
3007 __ Push(r1, r0); 3007 __ Push(r1, r0);
3008 } 3008 }
3009 3009
3010 3010
3011 void TranscendentalCacheStub::Generate(MacroAssembler* masm) { 3011 void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
3012 // Untagged case: double input in d2, double result goes 3012 // Untagged case: double input in d2, double result goes
3013 // into d2. 3013 // into d2.
3014 // Tagged case: tagged input on top of stack and in r0, 3014 // Tagged case: tagged input on top of stack and in r0,
3015 // tagged result (heap number) goes into r0. 3015 // tagged result (heap number) goes into r0.
3016 3016
(...skipping 3372 matching lines...)
6389 __ mov(result, Operand(0)); 6389 __ mov(result, Operand(0));
6390 __ Ret(); 6390 __ Ret();
6391 } 6391 }
6392 6392
6393 6393
6394 #undef __ 6394 #undef __
6395 6395
6396 } } // namespace v8::internal 6396 } } // namespace v8::internal
6397 6397
6398 #endif // V8_TARGET_ARCH_ARM 6398 #endif // V8_TARGET_ARCH_ARM
