Chromium Code Reviews

Unified Diff: src/arm/code-stubs-arm.cc

Issue 6879081: Added type recording for unary minus and unary bitwise negation. (Closed)
Base URL: http://v8.googlecode.com/svn/branches/bleeding_edge/
Patch Set: Incorporated Florian's suggested changes. Created 9 years, 8 months ago.
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
(...skipping 37 matching lines...)
                                    Register rhs,
                                    Label* lhs_not_nan,
                                    Label* slow,
                                    bool strict);
static void EmitTwoNonNanDoubleComparison(MacroAssembler* masm, Condition cond);
static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
                                           Register lhs,
                                           Register rhs);

+// Check if the operand is a heap number.
+static void EmitCheckForHeapNumber(MacroAssembler* masm, Register operand,
+                                   Register scratch1, Register scratch2,
+                                   Label* not_a_heap_number) {
+  __ ldr(scratch1, FieldMemOperand(operand, HeapObject::kMapOffset));
+  __ LoadRoot(scratch2, Heap::kHeapNumberMapRootIndex);
+  __ cmp(scratch1, scratch2);
+  __ b(ne, not_a_heap_number);
+}
+
+
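The new helper factors out a map check that several stubs repeat: the first word of a heap object points to its Map, and comparing that pointer against the canonical heap-number map identifies heap numbers. A minimal host-side model of the idea, with invented names and a simplified layout rather than V8's real classes:

#include <cstddef>
#include <cstdio>

struct Map {};                    // stand-in for the real Map class
static Map kHeapNumberMap;        // stand-in for the heap-number root entry

struct HeapObject { Map* map; };  // first field, like kMapOffset in the stub

bool IsHeapNumber(const HeapObject* obj) {
  return obj->map == &kHeapNumberMap;  // ldr + LoadRoot + cmp + b(ne, ...)
}

int main() {
  HeapObject number = { &kHeapNumberMap };
  HeapObject other = { NULL };
  printf("%d %d\n", IsHeapNumber(&number), IsHeapNumber(&other));  // 1 0
  return 0;
}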
void ToNumberStub::Generate(MacroAssembler* masm) {
  // The ToNumber stub takes one argument in r0.
  Label check_heap_number, call_builtin;
  __ tst(r0, Operand(kSmiTagMask));
  __ b(ne, &check_heap_number);
  __ Ret();

  __ bind(&check_heap_number);
-  __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
-  __ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
-  __ cmp(r1, ip);
-  __ b(ne, &call_builtin);
+  EmitCheckForHeapNumber(masm, r0, r1, ip, &call_builtin);
  __ Ret();

  __ bind(&call_builtin);
  __ push(r0);
  __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_JS);
}

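ToNumberStub returns smi inputs unchanged: the tst against kSmiTagMask sets the Z flag exactly when tag bit 0 is clear, so only heap objects fall through to the map check. A host-side sketch of the tag test, assuming V8's 32-bit tagging (smis stored as value << 1, heap pointers carrying tag bit 1):

#include <cstdint>
#include <cstdio>

static const uint32_t kTagMask = 1;  // plays the role of kSmiTagMask

bool IsSmi(uint32_t tagged) {
  return (tagged & kTagMask) == 0;  // the tst / b(ne) pair in the stub
}

int main() {
  uint32_t smi_seven = 7u << 1;         // tagged smi 7
  uint32_t heap_pointer = 0x1000u | 1;  // heap object pointers have bit 0 set
  printf("%d %d\n", IsSmi(smi_seven), IsSmi(heap_pointer));  // 1 0
  return 0;
}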

void FastNewClosureStub::Generate(MacroAssembler* masm) {
  // Create a new closure from the given function info in new
(...skipping 1568 matching lines...)
  // If length is not zero, "tos_" contains a non-zero value ==> true.
  __ Ret();

  // Return 0 in "tos_" for false.
  __ bind(&false_result);
  __ mov(tos_, Operand(0, RelocInfo::NONE));
  __ Ret();
}

+Handle<Code> GetTypeRecordingUnaryOpStub(int key,
+                                         TRUnaryOpIC::TypeInfo type_info) {
+  TypeRecordingUnaryOpStub stub(key, type_info);
+  return stub.GetCode();
+}
+
+
+const char* TypeRecordingUnaryOpStub::GetName() {
+  if (name_ != NULL) return name_;
+  const int kMaxNameLength = 100;
+  name_ = Isolate::Current()->bootstrapper()->AllocateAutoDeletedArray(
+      kMaxNameLength);
+  if (name_ == NULL) return "OOM";
+  const char* op_name = Token::Name(op_);
+  const char* overwrite_name;
+  switch (mode_) {
+    case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break;
+    case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break;
+  }
+
+  OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
+               "TypeRecordingUnaryOpStub_%s_%s_%s",
+               op_name,
+               overwrite_name,
+               TRUnaryOpIC::GetName(operand_type_));
+  return name_;
+}
+
+
+// TODO(svenpanne): Use virtual functions instead of switch.
+void TypeRecordingUnaryOpStub::Generate(MacroAssembler* masm) {
+  switch (operand_type_) {
+    case TRUnaryOpIC::UNINITIALIZED:
+      GenerateTypeTransition(masm);
+      break;
+    case TRUnaryOpIC::SMI:
+      GenerateSmiStub(masm);
+      break;
+    case TRUnaryOpIC::HEAP_NUMBER:
+      GenerateHeapNumberStub(masm);
+      break;
+    case TRUnaryOpIC::GENERIC:
+      GenerateGenericStub(masm);
+      break;
+  }
+}
+
+
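The switch dispatches on the type feedback recorded so far: a stub starts UNINITIALIZED, and each time an operand outside the recorded type shows up, GenerateTypeTransition patches the call site with a more generic stub. A sketch of the intended ordering, using a hypothetical Join rule rather than the actual TRUnaryOpIC logic:

#include <algorithm>
#include <cstdio>

enum TypeInfo { UNINITIALIZED, SMI, HEAP_NUMBER, GENERIC };

// Hypothetical join: feedback only ever moves toward GENERIC, which is why
// the retargeting done in GenerateTypeTransition terminates.
TypeInfo Join(TypeInfo recorded, TypeInfo seen) {
  return std::max(recorded, seen);  // ordering matches the declaration order
}

int main() {
  printf("%d\n", Join(SMI, HEAP_NUMBER));  // 2, i.e. HEAP_NUMBER
  return 0;
}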
+void TypeRecordingUnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
+  // Prepare to push argument.
+  __ mov(r3, Operand(r0));
+
+  // Push this stub's key. Although the operation and the type info are
+  // encoded into the key, the encoding is opaque, so push them too.
+  __ mov(r2, Operand(Smi::FromInt(MinorKey())));
+  __ mov(r1, Operand(Smi::FromInt(op_)));
+  __ mov(r0, Operand(Smi::FromInt(operand_type_)));
+
+  __ Push(r3, r2, r1, r0);
+
+  __ TailCallExternalReference(
+      ExternalReference(IC_Utility(IC::kTypeRecordingUnaryOp_Patch),
+                        masm->isolate()),
+      4,
+      1);
+}
+
+
+// TODO(svenpanne): Use virtual functions instead of switch.
+void TypeRecordingUnaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
+  switch (op_) {
+    case Token::SUB:
+      GenerateSmiStubSub(masm);
+      break;
+    case Token::BIT_NOT:
+      GenerateSmiStubBitNot(masm);
+      break;
+    default:
+      UNREACHABLE();
+  }
+}
+
+
+void TypeRecordingUnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) {
+  Label non_smi, slow;
+  GenerateSmiCodeSub(masm, &non_smi, &slow);
+  __ bind(&non_smi);
+  __ bind(&slow);
+  GenerateTypeTransition(masm);
+}
+
+
+void TypeRecordingUnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) {
+  Label non_smi;
+  GenerateSmiCodeBitNot(masm, &non_smi);
+  __ bind(&non_smi);
+  GenerateTypeTransition(masm);
+}
+
+
+void TypeRecordingUnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
+                                                  Label* non_smi,
+                                                  Label* slow) {
+  __ JumpIfNotSmi(r0, non_smi);
+
+  // The result of negating zero or the smallest negative smi is not a smi.
+  __ bic(ip, r0, Operand(0x80000000), SetCC);
+  __ b(eq, slow);
+
+  // Return '0 - value'.
+  __ rsb(r0, r0, Operand(0, RelocInfo::NONE));
+  __ Ret();
+}
+
+
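The bic with SetCC guards the two inputs whose negation leaves the smi range: 0 (whose negation is -0, a heap number) and -2^30 (whose negation 2^30 exceeds the largest smi). Clearing bit 31 of the tagged word yields zero for exactly those two values. A host-side check of that claim (not V8 code), assuming 32-bit smi tagging:

#include <cstdint>
#include <cstdio>

bool NeedsSlowPath(int32_t value) {
  uint32_t tagged = static_cast<uint32_t>(value) << 1;  // smi-tag: x -> x << 1
  return (tagged & ~0x80000000u) == 0;  // bic ip, r0, #0x80000000, SetCC; beq
}

int main() {
  int32_t samples[] = {0, 1, -1, 42, (1 << 30) - 1, -(1 << 30)};
  for (int32_t v : samples)
    printf("%11d -> %s\n", v, NeedsSlowPath(v) ? "slow" : "rsb fast path");
  return 0;  // only 0 and -2^30 take the slow path
}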
+void TypeRecordingUnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm,
+                                                     Label* non_smi) {
+  __ JumpIfNotSmi(r0, non_smi);
+
+  // Flip bits and revert inverted smi-tag.
+  __ mvn(r0, Operand(r0));
+  __ bic(r0, r0, Operand(kSmiTagMask));
+  __ Ret();
+}
+
+
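No untag/retag is needed for bitwise NOT: with smis tagged as value << 1, the identity ~(x << 1) == (~x << 1) | 1 means mvn produces the tagged result of ~x plus a stray tag bit, and the bic clears it. A host-side check (not V8 code), under the same tagging assumption:

#include <cstdint>
#include <cstdio>

int32_t TaggedBitNot(int32_t value) {
  uint32_t tagged = static_cast<uint32_t>(value) << 1;
  uint32_t flipped = ~tagged;                 // mvn r0, r0
  uint32_t result = flipped & ~1u;            // bic r0, r0, #kSmiTagMask
  return static_cast<int32_t>(result) >> 1;   // untag, for printing only
}

int main() {
  int32_t samples[] = {0, 5, -1, 1 << 20};
  for (int32_t v : samples)
    printf("~%d == %d\n", v, TaggedBitNot(v));  // matches ~v for each sample
  return 0;
}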
+// TODO(svenpanne): Use virtual functions instead of switch.
+void TypeRecordingUnaryOpStub::GenerateHeapNumberStub(MacroAssembler* masm) {
+  switch (op_) {
+    case Token::SUB:
+      GenerateHeapNumberStubSub(masm);
+      break;
+    case Token::BIT_NOT:
+      GenerateHeapNumberStubBitNot(masm);
+      break;
+    default:
+      UNREACHABLE();
+  }
+}
+
+
+void TypeRecordingUnaryOpStub::GenerateHeapNumberStubSub(MacroAssembler* masm) {
+  Label non_smi, slow;
+  GenerateSmiCodeSub(masm, &non_smi, &slow);
+  __ bind(&non_smi);
+  GenerateHeapNumberCodeSub(masm, &slow);
+  __ bind(&slow);
+  GenerateTypeTransition(masm);
+}
+
+
+void TypeRecordingUnaryOpStub::GenerateHeapNumberStubBitNot(
+    MacroAssembler* masm) {
+  Label non_smi, slow;
+  GenerateSmiCodeBitNot(masm, &non_smi);
+  __ bind(&non_smi);
+  GenerateHeapNumberCodeBitNot(masm, &slow);
+  __ bind(&slow);
+  GenerateTypeTransition(masm);
+}
+
+
+void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
+                                                         Label* slow) {
+  EmitCheckForHeapNumber(masm, r0, r1, r6, slow);
+  // r0 is a heap number. Get a new heap number in r1.
+  if (mode_ == UNARY_OVERWRITE) {
+    __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
+    __ eor(r2, r2, Operand(HeapNumber::kSignMask));  // Flip sign.
+    __ str(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
+  } else {
+    Label slow_allocate_heapnumber, heapnumber_allocated;
+    __ AllocateHeapNumber(r1, r2, r3, r6, &slow_allocate_heapnumber);
+    __ jmp(&heapnumber_allocated);
+
+    __ bind(&slow_allocate_heapnumber);
+    __ EnterInternalFrame();
+    __ push(r0);
+    __ CallRuntime(Runtime::kNumberAlloc, 0);
+    __ mov(r1, Operand(r0));
+    __ pop(r0);
+    __ LeaveInternalFrame();
+
+    __ bind(&heapnumber_allocated);
+    __ ldr(r3, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
+    __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
+    __ str(r3, FieldMemOperand(r1, HeapNumber::kMantissaOffset));
+    __ eor(r2, r2, Operand(HeapNumber::kSignMask));  // Flip sign.
+    __ str(r2, FieldMemOperand(r1, HeapNumber::kExponentOffset));
+    __ mov(r0, Operand(r1));
+  }
+  __ Ret();
+}
+
+
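In UNARY_OVERWRITE mode the stub negates in place by XOR-ing HeapNumber::kSignMask into the word at kExponentOffset, which holds the sign and exponent bits of the stored IEEE-754 double; the mantissa word is untouched. A host-side demonstration (not V8 code) of the same bit trick on a plain double:

#include <cstdint>
#include <cstdio>
#include <cstring>

double FlipSign(double d) {
  uint64_t bits;
  std::memcpy(&bits, &d, sizeof bits);
  bits ^= 1ULL << 63;  // the sign bit, i.e. kSignMask in the high word
  std::memcpy(&d, &bits, sizeof d);
  return d;
}

int main() {
  printf("%g %g %g\n", FlipSign(3.5), FlipSign(-0.0), FlipSign(0.0));
  // prints: -3.5 0 -0
  return 0;
}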
+void TypeRecordingUnaryOpStub::GenerateHeapNumberCodeBitNot(
+    MacroAssembler* masm, Label* slow) {
+  EmitCheckForHeapNumber(masm, r0, r1, r6, slow);
+  // Convert the heap number in r0 to an untagged integer in r1.
+  __ ConvertToInt32(r0, r1, r2, r3, d0, slow);
+
+  // Do the bitwise operation and check if the result fits in a smi.
+  Label try_float;
+  __ mvn(r1, Operand(r1));
+  __ add(r2, r1, Operand(0x40000000), SetCC);
+  __ b(mi, &try_float);
+
+  // Tag the result as a smi and we're done.
+  __ mov(r0, Operand(r1, LSL, kSmiTagSize));
+  __ Ret();
+
+  // Try to store the result in a heap number.
+  __ bind(&try_float);
+  if (mode_ == UNARY_NO_OVERWRITE) {
+    Label slow_allocate_heapnumber, heapnumber_allocated;
+    __ AllocateHeapNumber(r0, r2, r3, r6, &slow_allocate_heapnumber);
+    __ jmp(&heapnumber_allocated);
+
+    __ bind(&slow_allocate_heapnumber);
+    __ EnterInternalFrame();
+    __ push(r1);
+    __ CallRuntime(Runtime::kNumberAlloc, 0);
+    __ pop(r1);
+    __ LeaveInternalFrame();
+
+    __ bind(&heapnumber_allocated);
+  }
+
+  if (CpuFeatures::IsSupported(VFP3)) {
+    // Convert the int32 in r1 to the heap number in r0. r2 is corrupted.
+    CpuFeatures::Scope scope(VFP3);
+    __ vmov(s0, r1);
+    __ vcvt_f64_s32(d0, s0);
+    __ sub(r2, r0, Operand(kHeapObjectTag));
+    __ vstr(d0, r2, HeapNumber::kValueOffset);
+    __ Ret();
+  } else {
+    // WriteInt32ToHeapNumberStub does not trigger GC, so we do not
+    // have to set up a frame.
+    WriteInt32ToHeapNumberStub stub(r1, r0, r2);
+    __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
+  }
+}
+
+
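The add of 0x40000000 with SetCC is a one-instruction smi-range test: r1 + 2^30 stays non-negative exactly when r1 lies in [-2^30, 2^30), so the mi branch catches every result that needs a heap number. A host-side check (not V8 code) of the boundary cases:

#include <cstdint>
#include <cstdio>

bool FitsInSmi(int32_t r) {
  uint32_t sum = static_cast<uint32_t>(r) + 0x40000000u;  // add ..., SetCC
  return static_cast<int32_t>(sum) >= 0;                  // N flag clear
}

int main() {
  int32_t samples[] = {0, (1 << 30) - 1, 1 << 30, -(1 << 30), -(1 << 30) - 1};
  for (int32_t r : samples)
    printf("%11d -> %s\n", r, FitsInSmi(r) ? "smi" : "heap number");
  return 0;
}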
+// TODO(svenpanne): Use virtual functions instead of switch.
+void TypeRecordingUnaryOpStub::GenerateGenericStub(MacroAssembler* masm) {
+  switch (op_) {
+    case Token::SUB:
+      GenerateGenericStubSub(masm);
+      break;
+    case Token::BIT_NOT:
+      GenerateGenericStubBitNot(masm);
+      break;
+    default:
+      UNREACHABLE();
+  }
+}
+
+
+void TypeRecordingUnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) {
+  Label non_smi, slow;
+  GenerateSmiCodeSub(masm, &non_smi, &slow);
+  __ bind(&non_smi);
+  GenerateHeapNumberCodeSub(masm, &slow);
+  __ bind(&slow);
+  GenerateGenericCodeFallback(masm);
+}
+
+
+void TypeRecordingUnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) {
+  Label non_smi, slow;
+  GenerateSmiCodeBitNot(masm, &non_smi);
+  __ bind(&non_smi);
+  GenerateHeapNumberCodeBitNot(masm, &slow);
+  __ bind(&slow);
+  GenerateGenericCodeFallback(masm);
+}
+
+
+void TypeRecordingUnaryOpStub::GenerateGenericCodeFallback(
+    MacroAssembler* masm) {
+  // Handle the slow case by jumping to the JavaScript builtin.
+  __ push(r0);
+  switch (op_) {
+    case Token::SUB:
+      __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_JS);
+      break;
+    case Token::BIT_NOT:
+      __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_JS);
+      break;
+    default:
+      UNREACHABLE();
+  }
+}
+
+
Handle<Code> GetTypeRecordingBinaryOpStub(int key,
                                          TRBinaryOpIC::TypeInfo type_info,
                                          TRBinaryOpIC::TypeInfo result_type_info) {
  TypeRecordingBinaryOpStub stub(key, type_info, result_type_info);
  return stub.GetCode();
}


void TypeRecordingBinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
  Label get_result;
(...skipping 4109 matching lines...)
  __ str(pc, MemOperand(sp, 0));
  __ Jump(target);  // Call the C++ function.
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM
