| OLD | NEW |
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1789 matching lines...) | |
| 1800 // If length is not zero, "tos_" contains a non-zero value ==> true. | 1800 // If length is not zero, "tos_" contains a non-zero value ==> true. |
| 1801 __ Ret(); | 1801 __ Ret(); |
| 1802 | 1802 |
| 1803 // Return 0 in "tos_" for false. | 1803 // Return 0 in "tos_" for false. |
| 1804 __ bind(&false_result); | 1804 __ bind(&false_result); |
| 1805 __ mov(tos_, zero_reg); | 1805 __ mov(tos_, zero_reg); |
| 1806 __ Ret(); | 1806 __ Ret(); |
| 1807 } | 1807 } |
| 1808 | 1808 |
| 1809 | 1809 |
| 1810 Handle<Code> GetTypeRecordingUnaryOpStub(int key, | 1810 Handle<Code> GetUnaryOpStub(int key, UnaryOpIC::TypeInfo type_info) { |
| 1811 TRUnaryOpIC::TypeInfo type_info) { | 1811 UnaryOpStub stub(key, type_info); |
| 1812 TypeRecordingUnaryOpStub stub(key, type_info); | |
| 1813 return stub.GetCode(); | 1812 return stub.GetCode(); |
| 1814 } | 1813 } |
| 1815 | 1814 |
| 1816 | 1815 |
| 1817 const char* TypeRecordingUnaryOpStub::GetName() { | 1816 const char* UnaryOpStub::GetName() { |
| 1818 if (name_ != NULL) return name_; | 1817 if (name_ != NULL) return name_; |
| 1819 const int kMaxNameLength = 100; | 1818 const int kMaxNameLength = 100; |
| 1820 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray( | 1819 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray( |
| 1821 kMaxNameLength); | 1820 kMaxNameLength); |
| 1822 if (name_ == NULL) return "OOM"; | 1821 if (name_ == NULL) return "OOM"; |
| 1823 const char* op_name = Token::Name(op_); | 1822 const char* op_name = Token::Name(op_); |
| 1824 const char* overwrite_name = NULL; // Make g++ happy. | 1823 const char* overwrite_name = NULL; // Make g++ happy. |
| 1825 switch (mode_) { | 1824 switch (mode_) { |
| 1826 case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break; | 1825 case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break; |
| 1827 case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break; | 1826 case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break; |
| 1828 } | 1827 } |
| 1829 | 1828 |
| 1830 OS::SNPrintF(Vector<char>(name_, kMaxNameLength), | 1829 OS::SNPrintF(Vector<char>(name_, kMaxNameLength), |
| 1831 "TypeRecordingUnaryOpStub_%s_%s_%s", | 1830 "UnaryOpStub_%s_%s_%s", |
| 1832 op_name, | 1831 op_name, |
| 1833 overwrite_name, | 1832 overwrite_name, |
| 1834 TRUnaryOpIC::GetName(operand_type_)); | 1833 UnaryOpIC::GetName(operand_type_)); |
| 1835 return name_; | 1834 return name_; |
| 1836 } | 1835 } |
| 1837 | 1836 |
| 1838 | 1837 |
| 1839 // TODO(svenpanne): Use virtual functions instead of switch. | 1838 // TODO(svenpanne): Use virtual functions instead of switch. |
| 1840 void TypeRecordingUnaryOpStub::Generate(MacroAssembler* masm) { | 1839 void UnaryOpStub::Generate(MacroAssembler* masm) { |
| 1841 switch (operand_type_) { | 1840 switch (operand_type_) { |
| 1842 case TRUnaryOpIC::UNINITIALIZED: | 1841 case UnaryOpIC::UNINITIALIZED: |
| 1843 GenerateTypeTransition(masm); | 1842 GenerateTypeTransition(masm); |
| 1844 break; | 1843 break; |
| 1845 case TRUnaryOpIC::SMI: | 1844 case UnaryOpIC::SMI: |
| 1846 GenerateSmiStub(masm); | 1845 GenerateSmiStub(masm); |
| 1847 break; | 1846 break; |
| 1848 case TRUnaryOpIC::HEAP_NUMBER: | 1847 case UnaryOpIC::HEAP_NUMBER: |
| 1849 GenerateHeapNumberStub(masm); | 1848 GenerateHeapNumberStub(masm); |
| 1850 break; | 1849 break; |
| 1851 case TRUnaryOpIC::GENERIC: | 1850 case UnaryOpIC::GENERIC: |
| 1852 GenerateGenericStub(masm); | 1851 GenerateGenericStub(masm); |
| 1853 break; | 1852 break; |
| 1854 } | 1853 } |
| 1855 } | 1854 } |
| 1856 | 1855 |
| 1857 | 1856 |
| 1858 void TypeRecordingUnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { | 1857 void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { |
| 1859 // Argument is in a0 and v0 at this point, so we can overwrite a0. | 1858 // Argument is in a0 and v0 at this point, so we can overwrite a0. |
| 1860 // Push this stub's key. Although the operation and the type info are | 1859 // Push this stub's key. Although the operation and the type info are |
| 1861 // encoded into the key, the encoding is opaque, so push them too. | 1860 // encoded into the key, the encoding is opaque, so push them too. |
| 1862 __ li(a2, Operand(Smi::FromInt(MinorKey()))); | 1861 __ li(a2, Operand(Smi::FromInt(MinorKey()))); |
| 1863 __ li(a1, Operand(Smi::FromInt(op_))); | 1862 __ li(a1, Operand(Smi::FromInt(op_))); |
| 1864 __ li(a0, Operand(Smi::FromInt(operand_type_))); | 1863 __ li(a0, Operand(Smi::FromInt(operand_type_))); |
| 1865 | 1864 |
| 1866 __ Push(v0, a2, a1, a0); | 1865 __ Push(v0, a2, a1, a0); |
| 1867 | 1866 |
| 1868 __ TailCallExternalReference( | 1867 __ TailCallExternalReference( |
| 1869 ExternalReference(IC_Utility(IC::kTypeRecordingUnaryOp_Patch), | 1868 ExternalReference(IC_Utility(IC::kUnaryOp_Patch), |
| 1870 masm->isolate()), | 1869 masm->isolate()), |
| 1871 4, | 1870 4, |
| 1872 1); | 1871 1); |
| 1873 } | 1872 } |
| 1874 | 1873 |
| 1875 | 1874 |
| 1876 // TODO(svenpanne): Use virtual functions instead of switch. | 1875 // TODO(svenpanne): Use virtual functions instead of switch. |
| 1877 void TypeRecordingUnaryOpStub::GenerateSmiStub(MacroAssembler* masm) { | 1876 void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) { |
| 1878 switch (op_) { | 1877 switch (op_) { |
| 1879 case Token::SUB: | 1878 case Token::SUB: |
| 1880 GenerateSmiStubSub(masm); | 1879 GenerateSmiStubSub(masm); |
| 1881 break; | 1880 break; |
| 1882 case Token::BIT_NOT: | 1881 case Token::BIT_NOT: |
| 1883 GenerateSmiStubBitNot(masm); | 1882 GenerateSmiStubBitNot(masm); |
| 1884 break; | 1883 break; |
| 1885 default: | 1884 default: |
| 1886 UNREACHABLE(); | 1885 UNREACHABLE(); |
| 1887 } | 1886 } |
| 1888 } | 1887 } |
| 1889 | 1888 |
| 1890 | 1889 |
| 1891 void TypeRecordingUnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) { | 1890 void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) { |
| 1892 Label non_smi, slow; | 1891 Label non_smi, slow; |
| 1893 GenerateSmiCodeSub(masm, &non_smi, &slow); | 1892 GenerateSmiCodeSub(masm, &non_smi, &slow); |
| 1894 __ bind(&non_smi); | 1893 __ bind(&non_smi); |
| 1895 __ bind(&slow); | 1894 __ bind(&slow); |
| 1896 GenerateTypeTransition(masm); | 1895 GenerateTypeTransition(masm); |
| 1897 } | 1896 } |
| 1898 | 1897 |
| 1899 | 1898 |
| 1900 void TypeRecordingUnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) { | 1899 void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) { |
| 1901 Label non_smi; | 1900 Label non_smi; |
| 1902 GenerateSmiCodeBitNot(masm, &non_smi); | 1901 GenerateSmiCodeBitNot(masm, &non_smi); |
| 1903 __ bind(&non_smi); | 1902 __ bind(&non_smi); |
| 1904 GenerateTypeTransition(masm); | 1903 GenerateTypeTransition(masm); |
| 1905 } | 1904 } |
| 1906 | 1905 |
| 1907 | 1906 |
| 1908 void TypeRecordingUnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm, | 1907 void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm, |
| 1909 Label* non_smi, | 1908 Label* non_smi, |
| 1910 Label* slow) { | 1909 Label* slow) { |
| 1911 __ JumpIfNotSmi(a0, non_smi); | 1910 __ JumpIfNotSmi(a0, non_smi); |
| 1912 | 1911 |
| 1913 // The result of negating zero or the smallest negative smi is not a smi. | 1912 // The result of negating zero or the smallest negative smi is not a smi. |
| 1914 __ And(t0, a0, ~0x80000000); | 1913 __ And(t0, a0, ~0x80000000); |
| 1915 __ Branch(slow, eq, t0, Operand(zero_reg)); | 1914 __ Branch(slow, eq, t0, Operand(zero_reg)); |
| 1916 | 1915 |
| 1917 // Return '0 - value'. | 1916 // Return '0 - value'. |
| 1918 __ Subu(v0, zero_reg, a0); | 1917 __ Subu(v0, zero_reg, a0); |
| 1919 __ Ret(); | 1918 __ Ret(); |
| 1920 } | 1919 } |
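
The guard before the subtraction is a compact bit trick: with 32-bit smis (tag in bit 0, value in the upper 31 bits), the only inputs whose negation is not a smi are tagged 0 (JS `-0` is a heap number) and tagged 0x80000000 (negating the minimum value overflows 31 bits), and those are exactly the values that become zero once the sign bit is masked off. A minimal sketch of the check, assuming that smi layout:

```cpp
#include <cstdint>

// Sketch, assuming 32-bit smis with kSmiTag == 0 (value == tagged >> 1).
// Mirrors `__ And(t0, a0, ~0x80000000)` followed by the eq-branch: the
// masked value is zero exactly for tagged 0 and tagged 0x80000000, the
// two inputs whose negation cannot be represented as a smi.
bool SmiNegationNeedsSlowPath(uint32_t tagged) {
  return (tagged & ~0x80000000u) == 0;
}
```
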
| 1921 | 1920 |
| 1922 | 1921 |
| 1923 void TypeRecordingUnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm, | 1922 void UnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm, |
| 1924 Label* non_smi) { | 1923 Label* non_smi) { |
| 1925 __ JumpIfNotSmi(a0, non_smi); | 1924 __ JumpIfNotSmi(a0, non_smi); |
| 1926 | 1925 |
| 1927 // Flip bits and revert inverted smi-tag. | 1926 // Flip bits and revert inverted smi-tag. |
| 1928 __ Neg(v0, a0); | 1927 __ Neg(v0, a0); |
| 1929 __ And(v0, v0, ~kSmiTagMask); | 1928 __ And(v0, v0, ~kSmiTagMask); |
| 1930 __ Ret(); | 1929 __ Ret(); |
| 1931 } | 1930 } |
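
The comment "Flip bits and revert inverted smi-tag" describes another tag trick: for a tagged smi t = 2*v, the bitwise NOT ~t equals 2*(~v) + 1, so the payload bits already form the tagged result of v's bitwise NOT and only the (now inverted) tag bit needs clearing. A sketch under the same smi-layout assumption:

```cpp
#include <cstdint>

// Sketch, assuming kSmiTag == 0 and kSmiTagMask == 1.
uint32_t SmiBitNot(uint32_t tagged) {
  uint32_t flipped = ~tagged;  // flip bits (this also inverts the smi tag)
  return flipped & ~1u;        // revert inverted smi-tag
}
```
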
| 1932 | 1931 |
| 1933 | 1932 |
| 1934 // TODO(svenpanne): Use virtual functions instead of switch. | 1933 // TODO(svenpanne): Use virtual functions instead of switch. |
| 1935 void TypeRecordingUnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { | 1934 void UnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { |
| 1936 switch (op_) { | 1935 switch (op_) { |
| 1937 case Token::SUB: | 1936 case Token::SUB: |
| 1938 GenerateHeapNumberStubSub(masm); | 1937 GenerateHeapNumberStubSub(masm); |
| 1939 break; | 1938 break; |
| 1940 case Token::BIT_NOT: | 1939 case Token::BIT_NOT: |
| 1941 GenerateHeapNumberStubBitNot(masm); | 1940 GenerateHeapNumberStubBitNot(masm); |
| 1942 break; | 1941 break; |
| 1943 default: | 1942 default: |
| 1944 UNREACHABLE(); | 1943 UNREACHABLE(); |
| 1945 } | 1944 } |
| 1946 } | 1945 } |
| 1947 | 1946 |
| 1948 | 1947 |
| 1949 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) { | 1948 void UnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) { |
| 1950 Label non_smi, slow, call_builtin; | 1949 Label non_smi, slow, call_builtin; |
| 1951 GenerateSmiCodeSub(masm, &non_smi, &call_builtin); | 1950 GenerateSmiCodeSub(masm, &non_smi, &call_builtin); |
| 1952 __ bind(&non_smi); | 1951 __ bind(&non_smi); |
| 1953 GenerateHeapNumberCodeSub(masm, &slow); | 1952 GenerateHeapNumberCodeSub(masm, &slow); |
| 1954 __ bind(&slow); | 1953 __ bind(&slow); |
| 1955 GenerateTypeTransition(masm); | 1954 GenerateTypeTransition(masm); |
| 1956 __ bind(&call_builtin); | 1955 __ bind(&call_builtin); |
| 1957 GenerateGenericCodeFallback(masm); | 1956 GenerateGenericCodeFallback(masm); |
| 1958 } | 1957 } |
| 1959 | 1958 |
| 1960 | 1959 |
| 1961 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubBitNot( | 1960 void UnaryOpStub::GenerateHeapNumberStubBitNot(MacroAssembler* masm) { |
| 1962 MacroAssembler* masm) { | |
| 1963 Label non_smi, slow; | 1961 Label non_smi, slow; |
| 1964 GenerateSmiCodeBitNot(masm, &non_smi); | 1962 GenerateSmiCodeBitNot(masm, &non_smi); |
| 1965 __ bind(&non_smi); | 1963 __ bind(&non_smi); |
| 1966 GenerateHeapNumberCodeBitNot(masm, &slow); | 1964 GenerateHeapNumberCodeBitNot(masm, &slow); |
| 1967 __ bind(&slow); | 1965 __ bind(&slow); |
| 1968 GenerateTypeTransition(masm); | 1966 GenerateTypeTransition(masm); |
| 1969 } | 1967 } |
| 1970 | 1968 |
| 1971 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, | 1969 void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, |
| 1972 Label* slow) { | 1970 Label* slow) { |
| 1973 EmitCheckForHeapNumber(masm, a0, a1, t2, slow); | 1971 EmitCheckForHeapNumber(masm, a0, a1, t2, slow); |
| 1974 // a0 is a heap number. Get a new heap number in a1. | 1972 // a0 is a heap number. Get a new heap number in a1. |
| 1975 if (mode_ == UNARY_OVERWRITE) { | 1973 if (mode_ == UNARY_OVERWRITE) { |
| 1976 __ lw(a2, FieldMemOperand(a0, HeapNumber::kExponentOffset)); | 1974 __ lw(a2, FieldMemOperand(a0, HeapNumber::kExponentOffset)); |
| 1977 __ Xor(a2, a2, Operand(HeapNumber::kSignMask)); // Flip sign. | 1975 __ Xor(a2, a2, Operand(HeapNumber::kSignMask)); // Flip sign. |
| 1978 __ sw(a2, FieldMemOperand(a0, HeapNumber::kExponentOffset)); | 1976 __ sw(a2, FieldMemOperand(a0, HeapNumber::kExponentOffset)); |
| 1979 } else { | 1977 } else { |
| 1980 Label slow_allocate_heapnumber, heapnumber_allocated; | 1978 Label slow_allocate_heapnumber, heapnumber_allocated; |
| 1981 __ AllocateHeapNumber(a1, a2, a3, t2, &slow_allocate_heapnumber); | 1979 __ AllocateHeapNumber(a1, a2, a3, t2, &slow_allocate_heapnumber); |
| 1982 __ jmp(&heapnumber_allocated); | 1980 __ jmp(&heapnumber_allocated); |
| (...skipping 11 matching lines...) | |
| 1994 __ lw(a2, FieldMemOperand(a0, HeapNumber::kExponentOffset)); | 1992 __ lw(a2, FieldMemOperand(a0, HeapNumber::kExponentOffset)); |
| 1995 __ sw(a3, FieldMemOperand(a1, HeapNumber::kMantissaOffset)); | 1993 __ sw(a3, FieldMemOperand(a1, HeapNumber::kMantissaOffset)); |
| 1996 __ Xor(a2, a2, Operand(HeapNumber::kSignMask)); // Flip sign. | 1994 __ Xor(a2, a2, Operand(HeapNumber::kSignMask)); // Flip sign. |
| 1997 __ sw(a2, FieldMemOperand(a1, HeapNumber::kExponentOffset)); | 1995 __ sw(a2, FieldMemOperand(a1, HeapNumber::kExponentOffset)); |
| 1998 __ mov(v0, a1); | 1996 __ mov(v0, a1); |
| 1999 } | 1997 } |
| 2000 __ Ret(); | 1998 __ Ret(); |
| 2001 } | 1999 } |
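
Both branches above implement unary minus on a heap number without touching the mantissa: negating an IEEE-754 double is a single XOR of the sign bit in the high (exponent) word, i.e. `HeapNumber::kSignMask`, applied either in place (UNARY_OVERWRITE) or into a freshly allocated heap number. A portable sketch of the same idea:

```cpp
#include <cstdint>
#include <cstring>

// Sketch: IEEE-754 negation only flips the top bit of the 64-bit pattern,
// which is why the stub XORs HeapNumber::kSignMask into the exponent word.
double NegateViaSignBit(double x) {
  uint64_t bits;
  std::memcpy(&bits, &x, sizeof bits);
  bits ^= 0x8000000000000000ull;  // flip the sign bit
  std::memcpy(&x, &bits, sizeof bits);
  return x;
}
```
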
| 2002 | 2000 |
| 2003 | 2001 |
| 2004 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeBitNot( | 2002 void UnaryOpStub::GenerateHeapNumberCodeBitNot( |
| 2005 MacroAssembler* masm, Label* slow) { | 2003 MacroAssembler* masm, |
| | 2004 Label* slow) { |
| 2006 EmitCheckForHeapNumber(masm, a0, a1, t2, slow); | 2005 EmitCheckForHeapNumber(masm, a0, a1, t2, slow); |
| 2007 // Convert the heap number in a0 to an untagged integer in a1. | 2006 // Convert the heap number in a0 to an untagged integer in a1. |
| 2008 __ ConvertToInt32(a0, a1, a2, a3, f0, slow); | 2007 __ ConvertToInt32(a0, a1, a2, a3, f0, slow); |
| 2009 | 2008 |
| 2010 // Do the bitwise operation and check if the result fits in a smi. | 2009 // Do the bitwise operation and check if the result fits in a smi. |
| 2011 Label try_float; | 2010 Label try_float; |
| 2012 __ Neg(a1, a1); | 2011 __ Neg(a1, a1); |
| 2013 __ Addu(a2, a1, Operand(0x40000000)); | 2012 __ Addu(a2, a1, Operand(0x40000000)); |
| 2014 __ Branch(&try_float, lt, a2, Operand(zero_reg)); | 2013 __ Branch(&try_float, lt, a2, Operand(zero_reg)); |
| 2015 | 2014 |
| (...skipping 28 matching lines...) | |
| 2044 } else { | 2043 } else { |
| 2045 // WriteInt32ToHeapNumberStub does not trigger GC, so we do not | 2044 // WriteInt32ToHeapNumberStub does not trigger GC, so we do not |
| 2046 // have to set up a frame. | 2045 // have to set up a frame. |
| 2047 WriteInt32ToHeapNumberStub stub(a1, v0, a2, a3); | 2046 WriteInt32ToHeapNumberStub stub(a1, v0, a2, a3); |
| 2048 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | 2047 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); |
| 2049 } | 2048 } |
| 2050 } | 2049 } |
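
The `Addu(a2, a1, Operand(0x40000000))` / branch-on-negative pair above is the stub's standard "fits in a smi" test: a 32-bit integer fits in a 31-bit smi iff it lies in [-2^30, 2^30), and adding 2^30 maps exactly that range onto the non-negative 32-bit integers. A sketch:

```cpp
#include <cstdint>

// Sketch: n fits in a 31-bit smi iff -2^30 <= n < 2^30, i.e. iff adding
// 0x40000000 does not carry into the sign bit. Mirrors
//   __ Addu(a2, a1, Operand(0x40000000));
//   __ Branch(&try_float, lt, a2, Operand(zero_reg));
bool FitsInSmi(int32_t n) {
  return static_cast<int32_t>(static_cast<uint32_t>(n) + 0x40000000u) >= 0;
}
```
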
| 2051 | 2050 |
| 2052 | 2051 |
| 2053 // TODO(svenpanne): Use virtual functions instead of switch. | 2052 // TODO(svenpanne): Use virtual functions instead of switch. |
| 2054 void TypeRecordingUnaryOpStub::GenerateGenericStub(MacroAssembler* masm) { | 2053 void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) { |
| 2055 switch (op_) { | 2054 switch (op_) { |
| 2056 case Token::SUB: | 2055 case Token::SUB: |
| 2057 GenerateGenericStubSub(masm); | 2056 GenerateGenericStubSub(masm); |
| 2058 break; | 2057 break; |
| 2059 case Token::BIT_NOT: | 2058 case Token::BIT_NOT: |
| 2060 GenerateGenericStubBitNot(masm); | 2059 GenerateGenericStubBitNot(masm); |
| 2061 break; | 2060 break; |
| 2062 default: | 2061 default: |
| 2063 UNREACHABLE(); | 2062 UNREACHABLE(); |
| 2064 } | 2063 } |
| 2065 } | 2064 } |
| 2066 | 2065 |
| 2067 | 2066 |
| 2068 void TypeRecordingUnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) { | 2067 void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) { |
| 2069 Label non_smi, slow; | 2068 Label non_smi, slow; |
| 2070 GenerateSmiCodeSub(masm, &non_smi, &slow); | 2069 GenerateSmiCodeSub(masm, &non_smi, &slow); |
| 2071 __ bind(&non_smi); | 2070 __ bind(&non_smi); |
| 2072 GenerateHeapNumberCodeSub(masm, &slow); | 2071 GenerateHeapNumberCodeSub(masm, &slow); |
| 2073 __ bind(&slow); | 2072 __ bind(&slow); |
| 2074 GenerateGenericCodeFallback(masm); | 2073 GenerateGenericCodeFallback(masm); |
| 2075 } | 2074 } |
| 2076 | 2075 |
| 2077 | 2076 |
| 2078 void TypeRecordingUnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) { | 2077 void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) { |
| 2079 Label non_smi, slow; | 2078 Label non_smi, slow; |
| 2080 GenerateSmiCodeBitNot(masm, &non_smi); | 2079 GenerateSmiCodeBitNot(masm, &non_smi); |
| 2081 __ bind(&non_smi); | 2080 __ bind(&non_smi); |
| 2082 GenerateHeapNumberCodeBitNot(masm, &slow); | 2081 GenerateHeapNumberCodeBitNot(masm, &slow); |
| 2083 __ bind(&slow); | 2082 __ bind(&slow); |
| 2084 GenerateGenericCodeFallback(masm); | 2083 GenerateGenericCodeFallback(masm); |
| 2085 } | 2084 } |
| 2086 | 2085 |
| 2087 | 2086 |
| 2088 void TypeRecordingUnaryOpStub::GenerateGenericCodeFallback( | 2087 void UnaryOpStub::GenerateGenericCodeFallback( |
| 2089 MacroAssembler* masm) { | 2088 MacroAssembler* masm) { |
| 2090 // Handle the slow case by jumping to the JavaScript builtin. | 2089 // Handle the slow case by jumping to the JavaScript builtin. |
| 2091 __ push(a0); | 2090 __ push(a0); |
| 2092 switch (op_) { | 2091 switch (op_) { |
| 2093 case Token::SUB: | 2092 case Token::SUB: |
| 2094 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION); | 2093 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION); |
| 2095 break; | 2094 break; |
| 2096 case Token::BIT_NOT: | 2095 case Token::BIT_NOT: |
| 2097 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION); | 2096 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION); |
| 2098 break; | 2097 break; |
| 2099 default: | 2098 default: |
| 2100 UNREACHABLE(); | 2099 UNREACHABLE(); |
| 2101 } | 2100 } |
| 2102 } | 2101 } |
| 2103 | 2102 |
| 2104 | 2103 |
| 2105 Handle<Code> GetTypeRecordingBinaryOpStub(int key, | 2104 Handle<Code> GetBinaryOpStub(int key, |
| 2106 TRBinaryOpIC::TypeInfo type_info, | 2105 BinaryOpIC::TypeInfo type_info, |
| 2107 TRBinaryOpIC::TypeInfo result_type_info) { | 2106 BinaryOpIC::TypeInfo result_type_info) { |
| 2108 TypeRecordingBinaryOpStub stub(key, type_info, result_type_info); | 2107 BinaryOpStub stub(key, type_info, result_type_info); |
| 2109 return stub.GetCode(); | 2108 return stub.GetCode(); |
| 2110 } | 2109 } |
| 2111 | 2110 |
| 2112 | 2111 |
| 2113 void TypeRecordingBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { | 2112 void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { |
| 2114 Label get_result; | 2113 Label get_result; |
| 2115 | 2114 |
| 2116 __ Push(a1, a0); | 2115 __ Push(a1, a0); |
| 2117 | 2116 |
| 2118 __ li(a2, Operand(Smi::FromInt(MinorKey()))); | 2117 __ li(a2, Operand(Smi::FromInt(MinorKey()))); |
| 2119 __ li(a1, Operand(Smi::FromInt(op_))); | 2118 __ li(a1, Operand(Smi::FromInt(op_))); |
| 2120 __ li(a0, Operand(Smi::FromInt(operands_type_))); | 2119 __ li(a0, Operand(Smi::FromInt(operands_type_))); |
| 2121 __ Push(a2, a1, a0); | 2120 __ Push(a2, a1, a0); |
| 2122 | 2121 |
| 2123 __ TailCallExternalReference( | 2122 __ TailCallExternalReference( |
| 2124 ExternalReference(IC_Utility(IC::kTypeRecordingBinaryOp_Patch), | 2123 ExternalReference(IC_Utility(IC::kBinaryOp_Patch), |
| 2125 masm->isolate()), | 2124 masm->isolate()), |
| 2126 5, | 2125 5, |
| 2127 1); | 2126 1); |
| 2128 } | 2127 } |
| 2129 | 2128 |
| 2130 | 2129 |
| 2131 void TypeRecordingBinaryOpStub::GenerateTypeTransitionWithSavedArgs( | 2130 void BinaryOpStub::GenerateTypeTransitionWithSavedArgs( |
| 2132 MacroAssembler* masm) { | 2131 MacroAssembler* masm) { |
| 2133 UNIMPLEMENTED(); | 2132 UNIMPLEMENTED(); |
| 2134 } | 2133 } |
| 2135 | 2134 |
| 2136 | 2135 |
| 2137 void TypeRecordingBinaryOpStub::Generate(MacroAssembler* masm) { | 2136 void BinaryOpStub::Generate(MacroAssembler* masm) { |
| 2138 switch (operands_type_) { | 2137 switch (operands_type_) { |
| 2139 case TRBinaryOpIC::UNINITIALIZED: | 2138 case BinaryOpIC::UNINITIALIZED: |
| 2140 GenerateTypeTransition(masm); | 2139 GenerateTypeTransition(masm); |
| 2141 break; | 2140 break; |
| 2142 case TRBinaryOpIC::SMI: | 2141 case BinaryOpIC::SMI: |
| 2143 GenerateSmiStub(masm); | 2142 GenerateSmiStub(masm); |
| 2144 break; | 2143 break; |
| 2145 case TRBinaryOpIC::INT32: | 2144 case BinaryOpIC::INT32: |
| 2146 GenerateInt32Stub(masm); | 2145 GenerateInt32Stub(masm); |
| 2147 break; | 2146 break; |
| 2148 case TRBinaryOpIC::HEAP_NUMBER: | 2147 case BinaryOpIC::HEAP_NUMBER: |
| 2149 GenerateHeapNumberStub(masm); | 2148 GenerateHeapNumberStub(masm); |
| 2150 break; | 2149 break; |
| 2151 case TRBinaryOpIC::ODDBALL: | 2150 case BinaryOpIC::ODDBALL: |
| 2152 GenerateOddballStub(masm); | 2151 GenerateOddballStub(masm); |
| 2153 break; | 2152 break; |
| 2154 case TRBinaryOpIC::BOTH_STRING: | 2153 case BinaryOpIC::BOTH_STRING: |
| 2155 GenerateBothStringStub(masm); | 2154 GenerateBothStringStub(masm); |
| 2156 break; | 2155 break; |
| 2157 case TRBinaryOpIC::STRING: | 2156 case BinaryOpIC::STRING: |
| 2158 GenerateStringStub(masm); | 2157 GenerateStringStub(masm); |
| 2159 break; | 2158 break; |
| 2160 case TRBinaryOpIC::GENERIC: | 2159 case BinaryOpIC::GENERIC: |
| 2161 GenerateGeneric(masm); | 2160 GenerateGeneric(masm); |
| 2162 break; | 2161 break; |
| 2163 default: | 2162 default: |
| 2164 UNREACHABLE(); | 2163 UNREACHABLE(); |
| 2165 } | 2164 } |
| 2166 } | 2165 } |
| 2167 | 2166 |
| 2168 | 2167 |
| 2169 const char* TypeRecordingBinaryOpStub::GetName() { | 2168 const char* BinaryOpStub::GetName() { |
| 2170 if (name_ != NULL) return name_; | 2169 if (name_ != NULL) return name_; |
| 2171 const int kMaxNameLength = 100; | 2170 const int kMaxNameLength = 100; |
| 2172 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray( | 2171 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray( |
| 2173 kMaxNameLength); | 2172 kMaxNameLength); |
| 2174 if (name_ == NULL) return "OOM"; | 2173 if (name_ == NULL) return "OOM"; |
| 2175 const char* op_name = Token::Name(op_); | 2174 const char* op_name = Token::Name(op_); |
| 2176 const char* overwrite_name; | 2175 const char* overwrite_name; |
| 2177 switch (mode_) { | 2176 switch (mode_) { |
| 2178 case NO_OVERWRITE: overwrite_name = "Alloc"; break; | 2177 case NO_OVERWRITE: overwrite_name = "Alloc"; break; |
| 2179 case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break; | 2178 case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break; |
| 2180 case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break; | 2179 case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break; |
| 2181 default: overwrite_name = "UnknownOverwrite"; break; | 2180 default: overwrite_name = "UnknownOverwrite"; break; |
| 2182 } | 2181 } |
| 2183 | 2182 |
| 2184 OS::SNPrintF(Vector<char>(name_, kMaxNameLength), | 2183 OS::SNPrintF(Vector<char>(name_, kMaxNameLength), |
| 2185 "TypeRecordingBinaryOpStub_%s_%s_%s", | 2184 "BinaryOpStub_%s_%s_%s", |
| 2186 op_name, | 2185 op_name, |
| 2187 overwrite_name, | 2186 overwrite_name, |
| 2188 TRBinaryOpIC::GetName(operands_type_)); | 2187 BinaryOpIC::GetName(operands_type_)); |
| 2189 return name_; | 2188 return name_; |
| 2190 } | 2189 } |
| 2191 | 2190 |
| 2192 | 2191 |
| 2193 | 2192 |
| 2194 void TypeRecordingBinaryOpStub::GenerateSmiSmiOperation( | 2193 void BinaryOpStub::GenerateSmiSmiOperation(MacroAssembler* masm) { |
| 2195 MacroAssembler* masm) { | |
| 2196 Register left = a1; | 2194 Register left = a1; |
| 2197 Register right = a0; | 2195 Register right = a0; |
| 2198 | 2196 |
| 2199 Register scratch1 = t0; | 2197 Register scratch1 = t0; |
| 2200 Register scratch2 = t1; | 2198 Register scratch2 = t1; |
| 2201 | 2199 |
| 2202 ASSERT(right.is(a0)); | 2200 ASSERT(right.is(a0)); |
| 2203 STATIC_ASSERT(kSmiTag == 0); | 2201 STATIC_ASSERT(kSmiTag == 0); |
| 2204 | 2202 |
| 2205 Label not_smi_result; | 2203 Label not_smi_result; |
| (...skipping 133 matching lines...) | |
| 2339 __ SmiTag(v0, scratch1); | 2337 __ SmiTag(v0, scratch1); |
| 2340 __ Ret(); | 2338 __ Ret(); |
| 2341 break; | 2339 break; |
| 2342 default: | 2340 default: |
| 2343 UNREACHABLE(); | 2341 UNREACHABLE(); |
| 2344 } | 2342 } |
| 2345 __ bind(¬_smi_result); | 2343 __ bind(¬_smi_result); |
| 2346 } | 2344 } |
| 2347 | 2345 |
| 2348 | 2346 |
| 2349 void TypeRecordingBinaryOpStub::GenerateFPOperation(MacroAssembler* masm, | 2347 void BinaryOpStub::GenerateFPOperation(MacroAssembler* masm, |
| 2350 bool smi_operands, | 2348 bool smi_operands, |
| 2351 Label* not_numbers, | 2349 Label* not_numbers, |
| 2352 Label* gc_required) { | 2350 Label* gc_required) { |
| 2353 Register left = a1; | 2351 Register left = a1; |
| 2354 Register right = a0; | 2352 Register right = a0; |
| 2355 Register scratch1 = t3; | 2353 Register scratch1 = t3; |
| 2356 Register scratch2 = t5; | 2354 Register scratch2 = t5; |
| 2357 Register scratch3 = t0; | 2355 Register scratch3 = t0; |
| 2358 | 2356 |
| 2359 ASSERT(smi_operands || (not_numbers != NULL)); | 2357 ASSERT(smi_operands || (not_numbers != NULL)); |
| 2360 if (smi_operands && FLAG_debug_code) { | 2358 if (smi_operands && FLAG_debug_code) { |
| 2361 __ AbortIfNotSmi(left); | 2359 __ AbortIfNotSmi(left); |
| 2362 __ AbortIfNotSmi(right); | 2360 __ AbortIfNotSmi(right); |
| (...skipping 192 matching lines...) | |
| 2555 default: | 2553 default: |
| 2556 UNREACHABLE(); | 2554 UNREACHABLE(); |
| 2557 } | 2555 } |
| 2558 } | 2556 } |
| 2559 | 2557 |
| 2560 | 2558 |
| 2561 // Generate the smi code. If the operation on smis is successful, this return | 2559 // Generate the smi code. If the operation on smis is successful, this return |
| 2562 // is generated. If the result is not a smi and heap number allocation is not | 2560 // is generated. If the result is not a smi and heap number allocation is not |
| 2563 // requested the code falls through. If number allocation is requested but a | 2561 // requested the code falls through. If number allocation is requested but a |
| 2564 // heap number cannot be allocated the code jumps to the label gc_required. | 2562 // heap number cannot be allocated the code jumps to the label gc_required. |
| 2565 void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, | 2563 void BinaryOpStub::GenerateSmiCode( |
| | 2564 MacroAssembler* masm, |
| 2566 Label* use_runtime, | 2565 Label* use_runtime, |
| 2567 Label* gc_required, | 2566 Label* gc_required, |
| 2568 SmiCodeGenerateHeapNumberResults allow_heapnumber_results) { | 2567 SmiCodeGenerateHeapNumberResults allow_heapnumber_results) { |
| 2569 Label not_smis; | 2568 Label not_smis; |
| 2570 | 2569 |
| 2571 Register left = a1; | 2570 Register left = a1; |
| 2572 Register right = a0; | 2571 Register right = a0; |
| 2573 Register scratch1 = t3; | 2572 Register scratch1 = t3; |
| 2574 Register scratch2 = t5; | 2573 Register scratch2 = t5; |
| 2575 | 2574 |
| 2576 // Perform combined smi check on both operands. | 2575 // Perform combined smi check on both operands. |
| 2577 __ Or(scratch1, left, Operand(right)); | 2576 __ Or(scratch1, left, Operand(right)); |
| 2578 STATIC_ASSERT(kSmiTag == 0); | 2577 STATIC_ASSERT(kSmiTag == 0); |
| 2579 __ JumpIfNotSmi(scratch1, ¬_smis); | 2578 __ JumpIfNotSmi(scratch1, ¬_smis); |
| 2580 | 2579 |
| 2581 // If the smi-smi operation results in a smi, a return is generated. | 2580 // If the smi-smi operation results in a smi, a return is generated. |
| 2582 GenerateSmiSmiOperation(masm); | 2581 GenerateSmiSmiOperation(masm); |
| 2583 | 2582 |
| 2584 // If heap number results are possible generate the result in an allocated | 2583 // If heap number results are possible generate the result in an allocated |
| 2585 // heap number. | 2584 // heap number. |
| 2586 if (allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS) { | 2585 if (allow_heapnumber_results == ALLOW_HEAPNUMBER_RESULTS) { |
| 2587 GenerateFPOperation(masm, true, use_runtime, gc_required); | 2586 GenerateFPOperation(masm, true, use_runtime, gc_required); |
| 2588 } | 2587 } |
| 2589 __ bind(¬_smis); | 2588 __ bind(¬_smis); |
| 2590 } | 2589 } |
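
The "combined smi check" above folds two tag tests into one: with kSmiTag == 0, the tag bit of both operands is clear iff the tag bit of their bitwise OR is clear, so one OR plus one `JumpIfNotSmi` replaces two separate checks. A sketch of the predicate:

```cpp
#include <cstdint>

// Sketch, assuming kSmiTag == 0 with the tag in bit 0.
bool BothAreSmis(uint32_t left, uint32_t right) {
  return ((left | right) & 1u) == 0;
}
```
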
| 2591 | 2590 |
| 2592 | 2591 |
| 2593 void TypeRecordingBinaryOpStub::GenerateSmiStub(MacroAssembler* masm) { | 2592 void BinaryOpStub::GenerateSmiStub(MacroAssembler* masm) { |
| 2594 Label not_smis, call_runtime; | 2593 Label not_smis, call_runtime; |
| 2595 | 2594 |
| 2596 if (result_type_ == TRBinaryOpIC::UNINITIALIZED || | 2595 if (result_type_ == BinaryOpIC::UNINITIALIZED || |
| 2597 result_type_ == TRBinaryOpIC::SMI) { | 2596 result_type_ == BinaryOpIC::SMI) { |
| 2598 // Only allow smi results. | 2597 // Only allow smi results. |
| 2599 GenerateSmiCode(masm, &call_runtime, NULL, NO_HEAPNUMBER_RESULTS); | 2598 GenerateSmiCode(masm, &call_runtime, NULL, NO_HEAPNUMBER_RESULTS); |
| 2600 } else { | 2599 } else { |
| 2601 // Allow heap number result and don't make a transition if a heap number | 2600 // Allow heap number result and don't make a transition if a heap number |
| 2602 // cannot be allocated. | 2601 // cannot be allocated. |
| 2603 GenerateSmiCode(masm, | 2602 GenerateSmiCode(masm, |
| 2604 &call_runtime, | 2603 &call_runtime, |
| 2605 &call_runtime, | 2604 &call_runtime, |
| 2606 ALLOW_HEAPNUMBER_RESULTS); | 2605 ALLOW_HEAPNUMBER_RESULTS); |
| 2607 } | 2606 } |
| 2608 | 2607 |
| 2609 // Code falls through if the result is not returned as either a smi or heap | 2608 // Code falls through if the result is not returned as either a smi or heap |
| 2610 // number. | 2609 // number. |
| 2611 GenerateTypeTransition(masm); | 2610 GenerateTypeTransition(masm); |
| 2612 | 2611 |
| 2613 __ bind(&call_runtime); | 2612 __ bind(&call_runtime); |
| 2614 GenerateCallRuntime(masm); | 2613 GenerateCallRuntime(masm); |
| 2615 } | 2614 } |
| 2616 | 2615 |
| 2617 | 2616 |
| 2618 void TypeRecordingBinaryOpStub::GenerateStringStub(MacroAssembler* masm) { | 2617 void BinaryOpStub::GenerateStringStub(MacroAssembler* masm) { |
| 2619 ASSERT(operands_type_ == TRBinaryOpIC::STRING); | 2618 ASSERT(operands_type_ == BinaryOpIC::STRING); |
| 2620 // Try to add arguments as strings, otherwise, transition to the generic | 2619 // Try to add arguments as strings, otherwise, transition to the generic |
| 2621 // TRBinaryOpIC type. | 2620 // BinaryOpIC type. |
| 2622 GenerateAddStrings(masm); | 2621 GenerateAddStrings(masm); |
| 2623 GenerateTypeTransition(masm); | 2622 GenerateTypeTransition(masm); |
| 2624 } | 2623 } |
| 2625 | 2624 |
| 2626 | 2625 |
| 2627 void TypeRecordingBinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) { | 2626 void BinaryOpStub::GenerateBothStringStub(MacroAssembler* masm) { |
| 2628 Label call_runtime; | 2627 Label call_runtime; |
| 2629 ASSERT(operands_type_ == TRBinaryOpIC::BOTH_STRING); | 2628 ASSERT(operands_type_ == BinaryOpIC::BOTH_STRING); |
| 2630 ASSERT(op_ == Token::ADD); | 2629 ASSERT(op_ == Token::ADD); |
| 2631 // If both arguments are strings, call the string add stub. | 2630 // If both arguments are strings, call the string add stub. |
| 2632 // Otherwise, do a transition. | 2631 // Otherwise, do a transition. |
| 2633 | 2632 |
| 2634 // Registers containing left and right operands respectively. | 2633 // Registers containing left and right operands respectively. |
| 2635 Register left = a1; | 2634 Register left = a1; |
| 2636 Register right = a0; | 2635 Register right = a0; |
| 2637 | 2636 |
| 2638 // Test if left operand is a string. | 2637 // Test if left operand is a string. |
| 2639 __ JumpIfSmi(left, &call_runtime); | 2638 __ JumpIfSmi(left, &call_runtime); |
| 2640 __ GetObjectType(left, a2, a2); | 2639 __ GetObjectType(left, a2, a2); |
| 2641 __ Branch(&call_runtime, ge, a2, Operand(FIRST_NONSTRING_TYPE)); | 2640 __ Branch(&call_runtime, ge, a2, Operand(FIRST_NONSTRING_TYPE)); |
| 2642 | 2641 |
| 2643 // Test if right operand is a string. | 2642 // Test if right operand is a string. |
| 2644 __ JumpIfSmi(right, &call_runtime); | 2643 __ JumpIfSmi(right, &call_runtime); |
| 2645 __ GetObjectType(right, a2, a2); | 2644 __ GetObjectType(right, a2, a2); |
| 2646 __ Branch(&call_runtime, ge, a2, Operand(FIRST_NONSTRING_TYPE)); | 2645 __ Branch(&call_runtime, ge, a2, Operand(FIRST_NONSTRING_TYPE)); |
| 2647 | 2646 |
| 2648 StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB); | 2647 StringAddStub string_add_stub(NO_STRING_CHECK_IN_STUB); |
| 2649 GenerateRegisterArgsPush(masm); | 2648 GenerateRegisterArgsPush(masm); |
| 2650 __ TailCallStub(&string_add_stub); | 2649 __ TailCallStub(&string_add_stub); |
| 2651 | 2650 |
| 2652 __ bind(&call_runtime); | 2651 __ bind(&call_runtime); |
| 2653 GenerateTypeTransition(masm); | 2652 GenerateTypeTransition(masm); |
| 2654 } | 2653 } |
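
Both operand checks above use the same idiom: V8 numbers string instance types below `FIRST_NONSTRING_TYPE`, so "is a string" reduces to one unsigned compare after the smi check, with the `ge` branch taking the miss path. A sketch with a hypothetical stand-in for the real constant:

```cpp
#include <cstdint>

// Sketch; kFirstNonstringType is a hypothetical stand-in for V8's
// FIRST_NONSTRING_TYPE, used only to show the comparison shape.
constexpr uint32_t kFirstNonstringType = 0x80;

bool IsStringInstanceType(uint32_t instance_type) {
  return instance_type < kFirstNonstringType;
}
```
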
| 2655 | 2654 |
| 2656 | 2655 |
| 2657 void TypeRecordingBinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) { | 2656 void BinaryOpStub::GenerateInt32Stub(MacroAssembler* masm) { |
| 2658 ASSERT(operands_type_ == TRBinaryOpIC::INT32); | 2657 ASSERT(operands_type_ == BinaryOpIC::INT32); |
| 2659 | 2658 |
| 2660 Register left = a1; | 2659 Register left = a1; |
| 2661 Register right = a0; | 2660 Register right = a0; |
| 2662 Register scratch1 = t3; | 2661 Register scratch1 = t3; |
| 2663 Register scratch2 = t5; | 2662 Register scratch2 = t5; |
| 2664 FPURegister double_scratch = f0; | 2663 FPURegister double_scratch = f0; |
| 2665 FPURegister single_scratch = f6; | 2664 FPURegister single_scratch = f6; |
| 2666 | 2665 |
| 2667 Register heap_number_result = no_reg; | 2666 Register heap_number_result = no_reg; |
| 2668 Register heap_number_map = t2; | 2667 Register heap_number_map = t2; |
| (...skipping 88 matching lines...) | |
| 2757 __ trunc_w_d(single_scratch, f10); | 2756 __ trunc_w_d(single_scratch, f10); |
| 2758 // Retrieve FCSR. | 2757 // Retrieve FCSR. |
| 2759 __ cfc1(scratch2, FCSR); | 2758 __ cfc1(scratch2, FCSR); |
| 2760 // Restore FCSR. | 2759 // Restore FCSR. |
| 2761 __ ctc1(scratch1, FCSR); | 2760 __ ctc1(scratch1, FCSR); |
| 2762 | 2761 |
| 2763 // Check for inexact conversion. | 2762 // Check for inexact conversion. |
| 2764 __ srl(scratch2, scratch2, kFCSRFlagShift); | 2763 __ srl(scratch2, scratch2, kFCSRFlagShift); |
| 2765 __ And(scratch2, scratch2, kFCSRFlagMask); | 2764 __ And(scratch2, scratch2, kFCSRFlagMask); |
| 2766 | 2765 |
| 2767 if (result_type_ <= TRBinaryOpIC::INT32) { | 2766 if (result_type_ <= BinaryOpIC::INT32) { |
| 2768 // If scratch2 != 0, result does not fit in a 32-bit integer. | 2767 // If scratch2 != 0, result does not fit in a 32-bit integer. |
| 2769 __ Branch(&transition, ne, scratch2, Operand(zero_reg)); | 2768 __ Branch(&transition, ne, scratch2, Operand(zero_reg)); |
| 2770 } | 2769 } |
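
The FCSR sequence above (truncate with `trunc_w_d`, then read the accumulated flag bits back from FCSR) detects whether the double-to-int truncation lost information; when an INT32 result type is required, any inexact conversion forces a type transition. A portable approximation of that test:

```cpp
#include <cmath>
#include <cstdint>

// Sketch: a portable stand-in for the FCSR inexact-conversion check.
// The conversion is exact iff x is already an integral value that fits
// in 32 bits (NaN fails t == x; infinities fail the range check).
bool TruncatesToInt32Exactly(double x, int32_t* out) {
  double t = std::trunc(x);
  if (t != x || t < -2147483648.0 || t > 2147483647.0) return false;
  *out = static_cast<int32_t>(t);
  return true;
}
```
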
| 2771 | 2770 |
| 2772 // Check if the result fits in a smi. | 2771 // Check if the result fits in a smi. |
| 2773 __ mfc1(scratch1, single_scratch); | 2772 __ mfc1(scratch1, single_scratch); |
| 2774 __ Addu(scratch2, scratch1, Operand(0x40000000)); | 2773 __ Addu(scratch2, scratch1, Operand(0x40000000)); |
| 2775 // If it does not, try to return a heap number. | 2774 // If it does not, try to return a heap number. |
| 2776 __ Branch(&return_heap_number, lt, scratch2, Operand(zero_reg)); | 2775 __ Branch(&return_heap_number, lt, scratch2, Operand(zero_reg)); |
| 2777 // Check for minus zero. Return heap number for minus zero. | 2776 // Check for minus zero. Return heap number for minus zero. |
| 2778 Label not_zero; | 2777 Label not_zero; |
| 2779 __ Branch(¬_zero, ne, scratch1, Operand(zero_reg)); | 2778 __ Branch(¬_zero, ne, scratch1, Operand(zero_reg)); |
| 2780 __ mfc1(scratch2, f11); | 2779 __ mfc1(scratch2, f11); |
| 2781 __ And(scratch2, scratch2, HeapNumber::kSignMask); | 2780 __ And(scratch2, scratch2, HeapNumber::kSignMask); |
| 2782 __ Branch(&return_heap_number, ne, scratch2, Operand(zero_reg)); | 2781 __ Branch(&return_heap_number, ne, scratch2, Operand(zero_reg)); |
| 2783 __ bind(¬_zero); | 2782 __ bind(¬_zero); |
| 2784 | 2783 |
| 2785 // Tag the result and return. | 2784 // Tag the result and return. |
| 2786 __ SmiTag(v0, scratch1); | 2785 __ SmiTag(v0, scratch1); |
| 2787 __ Ret(); | 2786 __ Ret(); |
| 2788 } else { | 2787 } else { |
| 2789 // DIV just falls through to allocating a heap number. | 2788 // DIV just falls through to allocating a heap number. |
| 2790 } | 2789 } |
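
The minus-zero check above handles the one case an integer compare cannot: a truncated result of 0 may have come from -0.0, which must stay a heap number to preserve JavaScript semantics, so the stub inspects the sign word of the original double (the `mfc1` + `And` with `HeapNumber::kSignMask`). A sketch:

```cpp
#include <cstdint>
#include <cstring>

// Sketch: -0.0 is the pattern with only the sign bit set, which
// distinguishes it from +0.0 even though the two compare equal.
bool IsMinusZero(double x) {
  uint64_t bits;
  std::memcpy(&bits, &x, sizeof bits);
  return bits == 0x8000000000000000ull;
}
```
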
| 2791 | 2790 |
| 2792 if (result_type_ >= ((op_ == Token::DIV) ? TRBinaryOpIC::HEAP_NUMBER | 2791 if (result_type_ >= ((op_ == Token::DIV) ? BinaryOpIC::HEAP_NUMBER |
| 2793 : TRBinaryOpIC::INT32)) { | 2792 : BinaryOpIC::INT32)) { |
| 2794 __ bind(&return_heap_number); | 2793 __ bind(&return_heap_number); |
| 2795 // We are using FPU registers so s0 is available. | 2794 // We are using FPU registers so s0 is available. |
| 2796 heap_number_result = s0; | 2795 heap_number_result = s0; |
| 2797 GenerateHeapResultAllocation(masm, | 2796 GenerateHeapResultAllocation(masm, |
| 2798 heap_number_result, | 2797 heap_number_result, |
| 2799 heap_number_map, | 2798 heap_number_map, |
| 2800 scratch1, | 2799 scratch1, |
| 2801 scratch2, | 2800 scratch2, |
| 2802 &call_runtime); | 2801 &call_runtime); |
| 2803 __ mov(v0, heap_number_result); | 2802 __ mov(v0, heap_number_result); |
| (...skipping 87 matching lines...) | |
| 2891 case Token::SHR: | 2890 case Token::SHR: |
| 2892 __ And(a2, a2, Operand(0x1f)); | 2891 __ And(a2, a2, Operand(0x1f)); |
| 2893 __ srlv(a2, a3, a2); | 2892 __ srlv(a2, a3, a2); |
| 2894 // SHR is special because it is required to produce a positive answer. | 2893 // SHR is special because it is required to produce a positive answer. |
| 2895 // We only get a negative result if the shift value (a2) is 0. | 2894 // We only get a negative result if the shift value (a2) is 0. |
| 2896 // This result cannot be represented as a signed 32-bit integer, try | 2895 // This result cannot be represented as a signed 32-bit integer, try |
| 2897 // to return a heap number if we can. | 2896 // to return a heap number if we can. |
| 2898 // The non FPU code does not support this special case, so jump to | 2897 // The non FPU code does not support this special case, so jump to |
| 2899 // runtime if we don't support it. | 2898 // runtime if we don't support it. |
| 2900 if (CpuFeatures::IsSupported(FPU)) { | 2899 if (CpuFeatures::IsSupported(FPU)) { |
| 2901 __ Branch((result_type_ <= TRBinaryOpIC::INT32) | 2900 __ Branch((result_type_ <= BinaryOpIC::INT32) |
| 2902 ? &transition | 2901 ? &transition |
| 2903 : &return_heap_number, | 2902 : &return_heap_number, |
| 2904 lt, | 2903 lt, |
| 2905 a2, | 2904 a2, |
| 2906 Operand(zero_reg)); | 2905 Operand(zero_reg)); |
| 2907 } else { | 2906 } else { |
| 2908 __ Branch((result_type_ <= TRBinaryOpIC::INT32) | 2907 __ Branch((result_type_ <= BinaryOpIC::INT32) |
| 2909 ? &transition | 2908 ? &transition |
| 2910 : &call_runtime, | 2909 : &call_runtime, |
| 2911 lt, | 2910 lt, |
| 2912 a2, | 2911 a2, |
| 2913 Operand(zero_reg)); | 2912 Operand(zero_reg)); |
| 2914 } | 2913 } |
| 2915 break; | 2914 break; |
| 2916 case Token::SHL: | 2915 case Token::SHL: |
| 2917 __ And(a2, a2, Operand(0x1f)); | 2916 __ And(a2, a2, Operand(0x1f)); |
| 2918 __ sllv(a2, a3, a2); | 2917 __ sllv(a2, a3, a2); |
| (...skipping 54 matching lines...) | |
| 2973 if (transition.is_linked()) { | 2972 if (transition.is_linked()) { |
| 2974 __ bind(&transition); | 2973 __ bind(&transition); |
| 2975 GenerateTypeTransition(masm); | 2974 GenerateTypeTransition(masm); |
| 2976 } | 2975 } |
| 2977 | 2976 |
| 2978 __ bind(&call_runtime); | 2977 __ bind(&call_runtime); |
| 2979 GenerateCallRuntime(masm); | 2978 GenerateCallRuntime(masm); |
| 2980 } | 2979 } |
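
The SHR special case noted above exists because JavaScript's `>>>` yields an unsigned 32-bit result: with a shift count of 0 and the top bit set, the value exceeds INT32_MAX and cannot be re-tagged as a signed int32/smi, so the stub must box a heap number (FPU path) or fall back to the runtime. A sketch of the test:

```cpp
#include <cstdint>

// Sketch: after an unsigned shift, a "negative" signed view means the
// result does not fit a signed 32-bit integer -- the 'lt ... zero_reg'
// branch in the stub.
bool ShrResultNeedsHeapNumber(uint32_t shifted) {
  return static_cast<int32_t>(shifted) < 0;
}
```
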
| 2981 | 2980 |
| 2982 | 2981 |
| 2983 void TypeRecordingBinaryOpStub::GenerateOddballStub(MacroAssembler* masm) { | 2982 void BinaryOpStub::GenerateOddballStub(MacroAssembler* masm) { |
| 2984 Label call_runtime; | 2983 Label call_runtime; |
| 2985 | 2984 |
| 2986 if (op_ == Token::ADD) { | 2985 if (op_ == Token::ADD) { |
| 2987 // Handle string addition here, because it is the only operation | 2986 // Handle string addition here, because it is the only operation |
| 2988 // that does not do a ToNumber conversion on the operands. | 2987 // that does not do a ToNumber conversion on the operands. |
| 2989 GenerateAddStrings(masm); | 2988 GenerateAddStrings(masm); |
| 2990 } | 2989 } |
| 2991 | 2990 |
| 2992 // Convert oddball arguments to numbers. | 2991 // Convert oddball arguments to numbers. |
| 2993 Label check, done; | 2992 Label check, done; |
| (...skipping 12 matching lines...) | |
| 3006 __ li(a0, Operand(Smi::FromInt(0))); | 3005 __ li(a0, Operand(Smi::FromInt(0))); |
| 3007 } else { | 3006 } else { |
| 3008 __ LoadRoot(a0, Heap::kNanValueRootIndex); | 3007 __ LoadRoot(a0, Heap::kNanValueRootIndex); |
| 3009 } | 3008 } |
| 3010 __ bind(&done); | 3009 __ bind(&done); |
| 3011 | 3010 |
| 3012 GenerateHeapNumberStub(masm); | 3011 GenerateHeapNumberStub(masm); |
| 3013 } | 3012 } |
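
The visible tail of the oddball normalization shows the undefined case: it becomes Smi 0 when the (elided) condition selects a bitwise operation and the NaN root otherwise, after which both operands are ordinary numbers and the stub reuses GenerateHeapNumberStub. A hedged sketch of that mapping (the enum and the bitwise/arithmetic split are illustrative, inferred from the Smi-0 and NaN-root branches above):

```cpp
#include <cmath>

// Illustrative sketch of the oddball -> number normalization.
enum class Oddball { kTrue, kFalse, kUndefined };

double OddballToNumber(Oddball o, bool bitwise_op) {
  switch (o) {
    case Oddball::kTrue:  return 1.0;
    case Oddball::kFalse: return 0.0;
    case Oddball::kUndefined:
      return bitwise_op ? 0.0 : std::nan("");  // ToInt32(NaN) == 0
  }
  return std::nan("");
}
```
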
| 3014 | 3013 |
| 3015 | 3014 |
| 3016 void TypeRecordingBinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { | 3015 void BinaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { |
| 3017 Label call_runtime; | 3016 Label call_runtime; |
| 3018 GenerateFPOperation(masm, false, &call_runtime, &call_runtime); | 3017 GenerateFPOperation(masm, false, &call_runtime, &call_runtime); |
| 3019 | 3018 |
| 3020 __ bind(&call_runtime); | 3019 __ bind(&call_runtime); |
| 3021 GenerateCallRuntime(masm); | 3020 GenerateCallRuntime(masm); |
| 3022 } | 3021 } |
| 3023 | 3022 |
| 3024 | 3023 |
| 3025 void TypeRecordingBinaryOpStub::GenerateGeneric(MacroAssembler* masm) { | 3024 void BinaryOpStub::GenerateGeneric(MacroAssembler* masm) { |
| 3026 Label call_runtime, call_string_add_or_runtime; | 3025 Label call_runtime, call_string_add_or_runtime; |
| 3027 | 3026 |
| 3028 GenerateSmiCode(masm, &call_runtime, &call_runtime, ALLOW_HEAPNUMBER_RESULTS); | 3027 GenerateSmiCode(masm, &call_runtime, &call_runtime, ALLOW_HEAPNUMBER_RESULTS); |
| 3029 | 3028 |
| 3030 GenerateFPOperation(masm, false, &call_string_add_or_runtime, &call_runtime); | 3029 GenerateFPOperation(masm, false, &call_string_add_or_runtime, &call_runtime); |
| 3031 | 3030 |
| 3032 __ bind(&call_string_add_or_runtime); | 3031 __ bind(&call_string_add_or_runtime); |
| 3033 if (op_ == Token::ADD) { | 3032 if (op_ == Token::ADD) { |
| 3034 GenerateAddStrings(masm); | 3033 GenerateAddStrings(masm); |
| 3035 } | 3034 } |
| 3036 | 3035 |
| 3037 __ bind(&call_runtime); | 3036 __ bind(&call_runtime); |
| 3038 GenerateCallRuntime(masm); | 3037 GenerateCallRuntime(masm); |
| 3039 } | 3038 } |
| 3040 | 3039 |
| 3041 | 3040 |
| 3042 void TypeRecordingBinaryOpStub::GenerateAddStrings(MacroAssembler* masm) { | 3041 void BinaryOpStub::GenerateAddStrings(MacroAssembler* masm) { |
| 3043 ASSERT(op_ == Token::ADD); | 3042 ASSERT(op_ == Token::ADD); |
| 3044 Label left_not_string, call_runtime; | 3043 Label left_not_string, call_runtime; |
| 3045 | 3044 |
| 3046 Register left = a1; | 3045 Register left = a1; |
| 3047 Register right = a0; | 3046 Register right = a0; |
| 3048 | 3047 |
| 3049 // Check if left argument is a string. | 3048 // Check if left argument is a string. |
| 3050 __ JumpIfSmi(left, &left_not_string); | 3049 __ JumpIfSmi(left, &left_not_string); |
| 3051 __ GetObjectType(left, a2, a2); | 3050 __ GetObjectType(left, a2, a2); |
| 3052 __ Branch(&left_not_string, ge, a2, Operand(FIRST_NONSTRING_TYPE)); | 3051 __ Branch(&left_not_string, ge, a2, Operand(FIRST_NONSTRING_TYPE)); |
| (...skipping 10 matching lines...) | |
| 3063 | 3062 |
| 3064 StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB); | 3063 StringAddStub string_add_right_stub(NO_STRING_CHECK_RIGHT_IN_STUB); |
| 3065 GenerateRegisterArgsPush(masm); | 3064 GenerateRegisterArgsPush(masm); |
| 3066 __ TailCallStub(&string_add_right_stub); | 3065 __ TailCallStub(&string_add_right_stub); |
| 3067 | 3066 |
| 3068 // At least one argument is not a string. | 3067 // At least one argument is not a string. |
| 3069 __ bind(&call_runtime); | 3068 __ bind(&call_runtime); |
| 3070 } | 3069 } |
| 3071 | 3070 |
| 3072 | 3071 |
| 3073 void TypeRecordingBinaryOpStub::GenerateCallRuntime(MacroAssembler* masm) { | 3072 void BinaryOpStub::GenerateCallRuntime(MacroAssembler* masm) { |
| 3074 GenerateRegisterArgsPush(masm); | 3073 GenerateRegisterArgsPush(masm); |
| 3075 switch (op_) { | 3074 switch (op_) { |
| 3076 case Token::ADD: | 3075 case Token::ADD: |
| 3077 __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION); | 3076 __ InvokeBuiltin(Builtins::ADD, JUMP_FUNCTION); |
| 3078 break; | 3077 break; |
| 3079 case Token::SUB: | 3078 case Token::SUB: |
| 3080 __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION); | 3079 __ InvokeBuiltin(Builtins::SUB, JUMP_FUNCTION); |
| 3081 break; | 3080 break; |
| 3082 case Token::MUL: | 3081 case Token::MUL: |
| 3083 __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION); | 3082 __ InvokeBuiltin(Builtins::MUL, JUMP_FUNCTION); |
| (...skipping 21 matching lines...) Expand all Loading... |
| 3105 break; | 3104 break; |
| 3106 case Token::SHL: | 3105 case Token::SHL: |
| 3107 __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION); | 3106 __ InvokeBuiltin(Builtins::SHL, JUMP_FUNCTION); |
| 3108 break; | 3107 break; |
| 3109 default: | 3108 default: |
| 3110 UNREACHABLE(); | 3109 UNREACHABLE(); |
| 3111 } | 3110 } |
| 3112 } | 3111 } |
| 3113 | 3112 |
| 3114 | 3113 |
| 3115 void TypeRecordingBinaryOpStub::GenerateHeapResultAllocation( | 3114 void BinaryOpStub::GenerateHeapResultAllocation( |
| 3116 MacroAssembler* masm, | 3115 MacroAssembler* masm, |
| 3117 Register result, | 3116 Register result, |
| 3118 Register heap_number_map, | 3117 Register heap_number_map, |
| 3119 Register scratch1, | 3118 Register scratch1, |
| 3120 Register scratch2, | 3119 Register scratch2, |
| 3121 Label* gc_required) { | 3120 Label* gc_required) { |
| 3122 | 3121 |
| 3123 // Code below will scratch result if allocation fails. To keep both arguments | 3122 // Code below will scratch result if allocation fails. To keep both arguments |
| 3124 // intact for the runtime call, result cannot be one of these. | 3123 // intact for the runtime call, result cannot be one of these. |
| 3125 ASSERT(!result.is(a0) && !result.is(a1)); | 3124 ASSERT(!result.is(a0) && !result.is(a1)); |
| (...skipping 13 matching lines...) | |
| 3139 __ mov(result, overwritable_operand); | 3138 __ mov(result, overwritable_operand); |
| 3140 __ bind(&allocated); | 3139 __ bind(&allocated); |
| 3141 } else { | 3140 } else { |
| 3142 ASSERT(mode_ == NO_OVERWRITE); | 3141 ASSERT(mode_ == NO_OVERWRITE); |
| 3143 __ AllocateHeapNumber( | 3142 __ AllocateHeapNumber( |
| 3144 result, scratch1, scratch2, heap_number_map, gc_required); | 3143 result, scratch1, scratch2, heap_number_map, gc_required); |
| 3145 } | 3144 } |
| 3146 } | 3145 } |
| 3147 | 3146 |
| 3148 | 3147 |
| 3149 void TypeRecordingBinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) { | 3148 void BinaryOpStub::GenerateRegisterArgsPush(MacroAssembler* masm) { |
| 3150 __ Push(a1, a0); | 3149 __ Push(a1, a0); |
| 3151 } | 3150 } |
| 3152 | 3151 |
| 3153 | 3152 |
| 3154 | 3153 |
| 3155 void TranscendentalCacheStub::Generate(MacroAssembler* masm) { | 3154 void TranscendentalCacheStub::Generate(MacroAssembler* masm) { |
| 3156 // Untagged case: double input in f4, double result goes | 3155 // Untagged case: double input in f4, double result goes |
| 3157 // into f4. | 3156 // into f4. |
| 3158 // Tagged case: tagged input on top of stack and in a0, | 3157 // Tagged case: tagged input on top of stack and in a0, |
| 3159 // tagged result (heap number) goes into v0. | 3158 // tagged result (heap number) goes into v0. |
| (...skipping 3488 matching lines...) | |
| 6648 __ mov(result, zero_reg); | 6647 __ mov(result, zero_reg); |
| 6649 __ Ret(); | 6648 __ Ret(); |
| 6650 } | 6649 } |
| 6651 | 6650 |
| 6652 | 6651 |
| 6653 #undef __ | 6652 #undef __ |
| 6654 | 6653 |
| 6655 } } // namespace v8::internal | 6654 } } // namespace v8::internal |
| 6656 | 6655 |
| 6657 #endif // V8_TARGET_ARCH_MIPS | 6656 #endif // V8_TARGET_ARCH_MIPS |