Chromium Code Reviews| OLD | NEW |
|---|---|
| 1 // Copyright 2011 the V8 project authors. All rights reserved. | 1 // Copyright 2011 the V8 project authors. All rights reserved. |
| 2 // Redistribution and use in source and binary forms, with or without | 2 // Redistribution and use in source and binary forms, with or without |
| 3 // modification, are permitted provided that the following conditions are | 3 // modification, are permitted provided that the following conditions are |
| 4 // met: | 4 // met: |
| 5 // | 5 // |
| 6 // * Redistributions of source code must retain the above copyright | 6 // * Redistributions of source code must retain the above copyright |
| 7 // notice, this list of conditions and the following disclaimer. | 7 // notice, this list of conditions and the following disclaimer. |
| 8 // * Redistributions in binary form must reproduce the above | 8 // * Redistributions in binary form must reproduce the above |
| 9 // copyright notice, this list of conditions and the following | 9 // copyright notice, this list of conditions and the following |
| 10 // disclaimer in the documentation and/or other materials provided | 10 // disclaimer in the documentation and/or other materials provided |
| (...skipping 1750 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 1761 // If length is not zero, "tos_" contains a non-zero value ==> true. | 1761 // If length is not zero, "tos_" contains a non-zero value ==> true. |
| 1762 __ Ret(); | 1762 __ Ret(); |
| 1763 | 1763 |
| 1764 // Return 0 in "tos_" for false. | 1764 // Return 0 in "tos_" for false. |
| 1765 __ bind(&false_result); | 1765 __ bind(&false_result); |
| 1766 __ mov(tos_, Operand(0, RelocInfo::NONE)); | 1766 __ mov(tos_, Operand(0, RelocInfo::NONE)); |
| 1767 __ Ret(); | 1767 __ Ret(); |
| 1768 } | 1768 } |
| 1769 | 1769 |
| 1770 | 1770 |
| 1771 Handle<Code> GetTypeRecordingUnaryOpStub(int key, | |
| 1772 TRUnaryOpIC::TypeInfo type_info) { | |
| 1773 TypeRecordingUnaryOpStub stub(key, type_info); | |
| 1774 return stub.GetCode(); | |
| 1775 } | |
| 1776 | |
| 1777 | |
| 1778 const char* TypeRecordingUnaryOpStub::GetName() { | |
| 1779 if (name_ != NULL) return name_; | |
| 1780 const int kMaxNameLength = 100; | |
| 1781 name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray( | |
| 1782 kMaxNameLength); | |
| 1783 if (name_ == NULL) return "OOM"; | |
| 1784 const char* op_name = Token::Name(op_); | |
| 1785 const char* overwrite_name; | |
| 1786 switch (mode_) { | |
| 1787 case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break; | |
| 1788 case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break; | |
| 1789 } | |
| 1790 | |
| 1791 OS::SNPrintF(Vector<char>(name_, kMaxNameLength), | |
| 1792 "TypeRecordingUnaryOpStub_%s_%s_%s", | |
| 1793 op_name, | |
| 1794 overwrite_name, | |
| 1795 TRUnaryOpIC::GetName(operand_type_)); | |
| 1796 return name_; | |
| 1797 } | |
| 1798 | |
| 1799 | |
| 1800 // TODO(svenpanne): Use virtual functions instead of switch. | |
| 1801 void TypeRecordingUnaryOpStub::Generate(MacroAssembler* masm) { | |
| 1802 switch (operand_type_) { | |
| 1803 case TRUnaryOpIC::UNINITIALIZED: | |
| 1804 GenerateTypeTransition(masm); | |
| 1805 break; | |
| 1806 case TRUnaryOpIC::SMI: | |
| 1807 GenerateSmiStub(masm); | |
| 1808 break; | |
| 1809 case TRUnaryOpIC::HEAP_NUMBER: | |
| 1810 GenerateHeapNumberStub(masm); | |
| 1811 break; | |
| 1812 case TRUnaryOpIC::GENERIC: | |
| 1813 GenerateGenericStub(masm); | |
| 1814 break; | |
| 1815 } | |
| 1816 } | |
| 1817 | |
| 1818 | |
| 1819 void TypeRecordingUnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { | |
| 1820 // Prepare to push argument. | |
| 1821 __ mov(r3, Operand(r0)); | |
| 1822 | |
| 1823 // Push this stub's key. Although the operation and the type info are | |
| 1824 // encoded into the key, the encoding is opaque, so push them too. | |
| 1825 __ mov(r2, Operand(Smi::FromInt(MinorKey()))); | |
| 1826 __ mov(r1, Operand(Smi::FromInt(op_))); | |
| 1827 __ mov(r0, Operand(Smi::FromInt(operand_type_))); | |
| 1828 | |
| 1829 __ Push(r3, r2, r1, r0); | |
| 1830 | |
| 1831 __ TailCallExternalReference( | |
| 1832 ExternalReference(IC_Utility(IC::kTypeRecordingUnaryOp_Patch), | |
| 1833 masm->isolate()), | |
| 1834 4, | |
| 1835 1); | |
| 1836 } | |
| 1837 | |
| 1838 | |
| 1839 // TODO(svenpanne): Use virtual functions instead of switch. | |
| 1840 void TypeRecordingUnaryOpStub::GenerateSmiStub(MacroAssembler* masm) { | |
| 1841 switch (op_) { | |
| 1842 case Token::SUB: | |
| 1843 GenerateSmiStubSub(masm); | |
| 1844 break; | |
| 1845 case Token::BIT_NOT: | |
| 1846 GenerateSmiStubBitNot(masm); | |
| 1847 break; | |
| 1848 default: | |
| 1849 UNREACHABLE(); | |
| 1850 } | |
| 1851 } | |
| 1852 | |
| 1853 | |
| 1854 void TypeRecordingUnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) { | |
| 1855 Label non_smi, slow; | |
| 1856 GenerateSmiCodeSub(masm, &non_smi, &slow); | |
| 1857 __ bind(&non_smi); | |
| 1858 __ bind(&slow); | |
| 1859 GenerateTypeTransition(masm); | |
| 1860 } | |
| 1861 | |
| 1862 | |
| 1863 void TypeRecordingUnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) { | |
| 1864 Label non_smi; | |
| 1865 GenerateSmiCodeBitNot(masm, &non_smi); | |
| 1866 __ bind(&non_smi); | |
| 1867 GenerateTypeTransition(masm); | |
| 1868 } | |
| 1869 | |
| 1870 | |
| 1871 void TypeRecordingUnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm, | |
| 1872 Label* non_smi, | |
| 1873 Label* slow) { | |
| 1874 __ JumpIfNotSmi(r0, non_smi); | |
| 1875 | |
| 1876 // The result of negating zero or the smallest negative smi is not a smi. | |
| 1877 __ bic(ip, r0, Operand(0x80000000), SetCC); | |
| 1878 __ b(eq, slow); | |
| 1879 | |
| 1880 // Return '0 - value'. | |
| 1881 __ rsb(r0, r0, Operand(0, RelocInfo::NONE)); | |
| 1882 __ Ret(); | |
| 1883 } | |
| 1884 | |
| 1885 | |
| 1886 void TypeRecordingUnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm, | |
| 1887 Label* non_smi) { | |
| 1888 __ JumpIfNotSmi(r0, non_smi); | |
| 1889 | |
| 1890 // Flip bits and revert inverted smi-tag. | |
| 1891 __ mvn(r0, Operand(r0)); | |
| 1892 __ bic(r0, r0, Operand(kSmiTagMask)); | |
| 1893 __ Ret(); | |
| 1894 } | |
| 1895 | |
| 1896 | |
| 1897 // TODO(svenpanne): Use virtual functions instead of switch. | |
| 1898 void TypeRecordingUnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) { | |
| 1899 switch (op_) { | |
| 1900 case Token::SUB: | |
| 1901 GenerateHeapNumberStubSub(masm); | |
| 1902 break; | |
| 1903 case Token::BIT_NOT: | |
| 1904 GenerateHeapNumberStubBitNot(masm); | |
| 1905 break; | |
| 1906 default: | |
| 1907 UNREACHABLE(); | |
| 1908 } | |
| 1909 } | |
| 1910 | |
| 1911 | |
| 1912 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) { | |
| 1913 Label non_smi, slow; | |
| 1914 GenerateSmiCodeSub(masm, &non_smi, &slow); | |
| 1915 __ bind(&non_smi); | |
| 1916 GenerateHeapNumberCodeSub(masm, &slow); | |
| 1917 __ bind(&slow); | |
| 1918 GenerateTypeTransition(masm); | |
| 1919 } | |
| 1920 | |
| 1921 | |
| 1922 void TypeRecordingUnaryOpStub::GenerateHeapNumberStubBitNot( | |
| 1923 MacroAssembler* masm) { | |
| 1924 Label non_smi, slow; | |
| 1925 GenerateSmiCodeBitNot(masm, &non_smi); | |
| 1926 __ bind(&non_smi); | |
| 1927 GenerateHeapNumberCodeBitNot(masm, &slow); | |
| 1928 __ bind(&slow); | |
| 1929 GenerateTypeTransition(masm); | |
| 1930 } | |
| 1931 | |
| 1932 | |
| 1933 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm, | |
| 1934 Label* slow) { | |
| 1935 Register heap_number_map = r6; | |
| 1936 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); | |
| 1937 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); | |
| 1938 __ cmp(r1, heap_number_map); | |
| 1939 __ b(ne, slow); | |
| 1940 | |
| 1941 // r0 is a heap number. Get a new heap number in r1. | |
| 1942 if (mode_ == UNARY_OVERWRITE) { | |
| 1943 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); | |
| 1944 __ eor(r2, r2, Operand(HeapNumber::kSignMask)); // Flip sign. | |
| 1945 __ str(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); | |
| 1946 } else { | |
| 1947 __ AllocateHeapNumber(r1, r2, r3, r6, slow); | |
| 1948 __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset)); | |
| 1949 __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset)); | |
| 1950 __ str(r3, FieldMemOperand(r1, HeapNumber::kMantissaOffset)); | |
| 1951 __ eor(r2, r2, Operand(HeapNumber::kSignMask)); // Flip sign. | |
| 1952 __ str(r2, FieldMemOperand(r1, HeapNumber::kExponentOffset)); | |
| 1953 __ mov(r0, Operand(r1)); | |
| 1954 } | |
| 1955 __ Ret(); | |
| 1956 } | |
| 1957 | |
| 1958 | |
| 1959 void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeBitNot( | |
| 1960 MacroAssembler* masm, Label* slow) { | |
| 1961 Register heap_number_map = r6; | |
| 1962 // Check if the operand is a heap number. | |
| 1963 __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset)); | |
| 1964 __ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex); | |
| 1965 __ cmp(r1, heap_number_map); | |
| 1966 __ b(ne, slow); | |
| 1967 | |
| 1968 // Convert the heap number in r0 to an untagged integer in r1. | |
| 1969 __ ConvertToInt32(r0, r1, r2, r3, d0, slow); | |
| 1970 | |
| 1971 // Do the bitwise operation and check if the result fits in a smi. | |
| 1972 Label try_float; | |
| 1973 __ mvn(r1, Operand(r1)); | |
| 1974 __ add(r2, r1, Operand(0x40000000), SetCC); | |
| 1975 __ b(mi, &try_float); | |
| 1976 | |
| 1977 // Tag the result as a smi and we're done. | |
| 1978 __ mov(r0, Operand(r1, LSL, kSmiTagSize)); | |
| 1979 __ Ret(); | |
| 1980 | |
| 1981 // Try to store the result in a heap number. | |
| 1982 __ bind(&try_float); | |
| 1983 if (mode_ == UNARY_NO_OVERWRITE) { | |
| 1984 Label slow_allocate_heapnumber, heapnumber_allocated; | |
| 1985 __ AllocateHeapNumber(r0, r2, r3, r6, &slow_allocate_heapnumber); | |
| 1986 __ jmp(&heapnumber_allocated); | |
| 1987 | |
| 1988 __ bind(&slow_allocate_heapnumber); | |
|
Sven Panne
2011/04/21 16:19:13
Hmmm, this seems to crash mjsunit/bit-not.js with
| |
| 1989 __ push(r1); | |
| 1990 __ CallRuntime(Runtime::kNumberAlloc, 0); | |
| 1991 __ pop(r1); | |
| 1992 | |
| 1993 __ bind(&heapnumber_allocated); | |
| 1994 | |
| 1995 } | |
| 1996 | |
| 1997 if (CpuFeatures::IsSupported(VFP3)) { | |
| 1998 // Convert the int32 in r1 to the heap number in r0. r2 is corrupted. | |
| 1999 CpuFeatures::Scope scope(VFP3); | |
| 2000 __ vmov(s0, r1); | |
| 2001 __ vcvt_f64_s32(d0, s0); | |
| 2002 __ sub(r2, r0, Operand(kHeapObjectTag)); | |
| 2003 __ vstr(d0, r2, HeapNumber::kValueOffset); | |
| 2004 __ Ret(); | |
| 2005 } else { | |
| 2006 // WriteInt32ToHeapNumberStub does not trigger GC, so we do not | |
| 2007 // have to set up a frame. | |
| 2008 WriteInt32ToHeapNumberStub stub(r1, r0, r2); | |
| 2009 __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET); | |
| 2010 } | |
| 2011 } | |
| 2012 | |
| 2013 | |
| 2014 // TODO(svenpanne): Use virtual functions instead of switch. | |
| 2015 void TypeRecordingUnaryOpStub::GenerateGenericStub(MacroAssembler* masm) { | |
| 2016 switch (op_) { | |
| 2017 case Token::SUB: | |
| 2018 GenerateGenericStubSub(masm); | |
| 2019 break; | |
| 2020 case Token::BIT_NOT: | |
| 2021 GenerateGenericStubBitNot(masm); | |
| 2022 break; | |
| 2023 default: | |
| 2024 UNREACHABLE(); | |
| 2025 } | |
| 2026 } | |
| 2027 | |
| 2028 | |
| 2029 void TypeRecordingUnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) { | |
| 2030 Label non_smi, slow; | |
| 2031 GenerateSmiCodeSub(masm, &non_smi, &slow); | |
| 2032 __ bind(&non_smi); | |
| 2033 GenerateHeapNumberCodeSub(masm, &slow); | |
| 2034 __ bind(&slow); | |
| 2035 GenerateGenericCodeFallback(masm); | |
| 2036 } | |
| 2037 | |
| 2038 | |
| 2039 void TypeRecordingUnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) { | |
| 2040 Label non_smi, slow; | |
| 2041 GenerateSmiCodeBitNot(masm, &non_smi); | |
| 2042 __ bind(&non_smi); | |
| 2043 GenerateHeapNumberCodeBitNot(masm, &slow); | |
| 2044 __ bind(&slow); | |
| 2045 GenerateGenericCodeFallback(masm); | |
| 2046 } | |
| 2047 | |
| 2048 | |
| 2049 void TypeRecordingUnaryOpStub::GenerateGenericCodeFallback( | |
| 2050 MacroAssembler* masm) { | |
| 2051 // Handle the slow case by jumping to the JavaScript builtin. | |
| 2052 __ push(r0); | |
| 2053 switch (op_) { | |
| 2054 case Token::SUB: | |
| 2055 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_JS); | |
| 2056 break; | |
| 2057 case Token::BIT_NOT: | |
| 2058 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_JS); | |
| 2059 break; | |
| 2060 default: | |
| 2061 UNREACHABLE(); | |
| 2062 } | |
| 2063 } | |
| 2064 | |
| 2065 | |
| 1771 Handle<Code> GetTypeRecordingBinaryOpStub(int key, | 2066 Handle<Code> GetTypeRecordingBinaryOpStub(int key, |
| 1772 TRBinaryOpIC::TypeInfo type_info, | 2067 TRBinaryOpIC::TypeInfo type_info, |
| 1773 TRBinaryOpIC::TypeInfo result_type_info) { | 2068 TRBinaryOpIC::TypeInfo result_type_info) { |
| 1774 TypeRecordingBinaryOpStub stub(key, type_info, result_type_info); | 2069 TypeRecordingBinaryOpStub stub(key, type_info, result_type_info); |
| 1775 return stub.GetCode(); | 2070 return stub.GetCode(); |
| 1776 } | 2071 } |
| 1777 | 2072 |
| 1778 | 2073 |
| 1779 void TypeRecordingBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { | 2074 void TypeRecordingBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { |
| 1780 Label get_result; | 2075 Label get_result; |
| (...skipping 4103 matching lines...) Expand 10 before | Expand all | Expand 10 after Loading... | |
| 5884 __ str(pc, MemOperand(sp, 0)); | 6179 __ str(pc, MemOperand(sp, 0)); |
| 5885 __ Jump(target); // Call the C++ function. | 6180 __ Jump(target); // Call the C++ function. |
| 5886 } | 6181 } |
| 5887 | 6182 |
| 5888 | 6183 |
| 5889 #undef __ | 6184 #undef __ |
| 5890 | 6185 |
| 5891 } } // namespace v8::internal | 6186 } } // namespace v8::internal |
| 5892 | 6187 |
| 5893 #endif // V8_TARGET_ARCH_ARM | 6188 #endif // V8_TARGET_ARCH_ARM |
| OLD | NEW |