Chromium Code Reviews

Side by Side Diff: src/mips/code-stubs-mips.cc

Issue 148503002: A64: Synchronize with r15545. (Closed) Base URL: https://v8.googlecode.com/svn/branches/experimental/a64
Patch Set: Created 6 years, 10 months ago
1 // Copyright 2012 the V8 project authors. All rights reserved. 1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without 2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are 3 // modification, are permitted provided that the following conditions are
4 // met: 4 // met:
5 // 5 //
6 // * Redistributions of source code must retain the above copyright 6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer. 7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above 8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following 9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided 10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution. 11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its 12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived 13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission. 14 // from this software without specific prior written permission.
15 // 15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27 27
28 #include "v8.h" 28 #include "v8.h"
29 29
30 #if defined(V8_TARGET_ARCH_MIPS) 30 #if V8_TARGET_ARCH_MIPS
31 31
32 #include "bootstrapper.h" 32 #include "bootstrapper.h"
33 #include "code-stubs.h" 33 #include "code-stubs.h"
34 #include "codegen.h" 34 #include "codegen.h"
35 #include "regexp-macro-assembler.h" 35 #include "regexp-macro-assembler.h"
36 #include "stub-cache.h" 36 #include "stub-cache.h"
37 37
38 namespace v8 { 38 namespace v8 {
39 namespace internal { 39 namespace internal {
40 40
(...skipping 179 matching lines...)
220 } 220 }
221 221
222 222
223 void InternalArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor( 223 void InternalArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
224 Isolate* isolate, 224 Isolate* isolate,
225 CodeStubInterfaceDescriptor* descriptor) { 225 CodeStubInterfaceDescriptor* descriptor) {
226 InitializeInternalArrayConstructorDescriptor(isolate, descriptor, -1); 226 InitializeInternalArrayConstructorDescriptor(isolate, descriptor, -1);
227 } 227 }
228 228
229 229
230 void UnaryOpStub::InitializeInterfaceDescriptor(
231 Isolate* isolate,
232 CodeStubInterfaceDescriptor* descriptor) {
233 static Register registers[] = { a0 };
234 descriptor->register_param_count_ = 1;
235 descriptor->register_params_ = registers;
236 descriptor->deoptimization_handler_ =
237 FUNCTION_ADDR(UnaryOpIC_Miss);
238 }
239
240
241 void StoreGlobalStub::InitializeInterfaceDescriptor(
242 Isolate* isolate,
243 CodeStubInterfaceDescriptor* descriptor) {
244 static Register registers[] = { a1, a2, a0 };
245 descriptor->register_param_count_ = 3;
246 descriptor->register_params_ = registers;
247 descriptor->deoptimization_handler_ =
248 FUNCTION_ADDR(StoreIC_MissFromStubFailure);
249 }
250
251
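The two descriptor initializers added above follow a common pattern: a static array names the registers that carry the stub's parameters, and a runtime miss handler is recorded for deoptimization. A minimal sketch of the fields being populated, with placeholder types standing in for V8's Register and Address (the real CodeStubInterfaceDescriptor in code-stubs.h carries more state):

    // Placeholder types; assumptions for illustration only.
    struct Register { int code; };
    typedef unsigned char* Address;

    struct CodeStubInterfaceDescriptorSketch {
      int register_param_count_;        // how many parameters arrive in registers
      Register* register_params_;       // which registers, in argument order
      Address deoptimization_handler_;  // runtime miss handler, e.g. an IC miss stub
    };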
230 #define __ ACCESS_MASM(masm) 252 #define __ ACCESS_MASM(masm)
231 253
254
232 static void EmitIdenticalObjectComparison(MacroAssembler* masm, 255 static void EmitIdenticalObjectComparison(MacroAssembler* masm,
233 Label* slow, 256 Label* slow,
234 Condition cc); 257 Condition cc);
235 static void EmitSmiNonsmiComparison(MacroAssembler* masm, 258 static void EmitSmiNonsmiComparison(MacroAssembler* masm,
236 Register lhs, 259 Register lhs,
237 Register rhs, 260 Register rhs,
238 Label* rhs_not_nan, 261 Label* rhs_not_nan,
239 Label* slow, 262 Label* slow,
240 bool strict); 263 bool strict);
241 static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm, 264 static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
(...skipping 932 matching lines...)
1174 __ Branch(&return_not_equal, eq, a2, Operand(ODDBALL_TYPE)); 1197 __ Branch(&return_not_equal, eq, a2, Operand(ODDBALL_TYPE));
1175 1198
1176 __ GetObjectType(rhs, a3, a3); 1199 __ GetObjectType(rhs, a3, a3);
1177 __ Branch(&return_not_equal, greater, a3, Operand(FIRST_SPEC_OBJECT_TYPE)); 1200 __ Branch(&return_not_equal, greater, a3, Operand(FIRST_SPEC_OBJECT_TYPE));
1178 1201
1179 // Check for oddballs: true, false, null, undefined. 1202 // Check for oddballs: true, false, null, undefined.
1180 __ Branch(&return_not_equal, eq, a3, Operand(ODDBALL_TYPE)); 1203 __ Branch(&return_not_equal, eq, a3, Operand(ODDBALL_TYPE));
1181 1204
1182 // Now that we have the types we might as well check for 1205 // Now that we have the types we might as well check for
1183 // internalized-internalized. 1206 // internalized-internalized.
1184 // Ensure that no non-strings have the internalized bit set. 1207 Label not_internalized;
1185 STATIC_ASSERT(LAST_TYPE < kNotStringTag + kIsInternalizedMask);
1186 STATIC_ASSERT(kInternalizedTag != 0); 1208 STATIC_ASSERT(kInternalizedTag != 0);
1187 __ And(t2, a2, Operand(a3)); 1209 __ And(t2, a2, Operand(kIsNotStringMask | kIsInternalizedMask));
1188 __ And(t0, t2, Operand(kIsInternalizedMask)); 1210 __ Branch(&not_internalized, ne, t2,
1189 __ Branch(&return_not_equal, ne, t0, Operand(zero_reg)); 1211 Operand(kInternalizedTag | kStringTag));
1212
1213 __ And(a3, a3, Operand(kIsNotStringMask | kIsInternalizedMask));
1214 __ Branch(&return_not_equal, eq, a3,
1215 Operand(kInternalizedTag | kStringTag));
1216
1217 __ bind(&not_internalized);
1190 } 1218 }
1191 1219
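The rewritten check above folds the two questions "is it a string?" and "is it internalized?" into a single masked compare per operand, instead of AND-ing the two instance types together as the old code did. A minimal host-side sketch of the predicate, assuming the instance-type flag values of this era (kStringTag == 0, kIsNotStringMask == 0x80, kInternalizedTag == kIsInternalizedMask == 0x40; the exact constants are an assumption for illustration):

    static const uint32_t kIsNotStringMask = 0x80;
    static const uint32_t kStringTag = 0x00;
    static const uint32_t kIsInternalizedMask = 0x40;
    static const uint32_t kInternalizedTag = 0x40;

    // True iff an instance type denotes an internalized string: the string
    // bit and the internalized bit are checked in one masked compare, which
    // is what the patched MIPS code emits.
    static bool IsInternalizedStringType(uint32_t instance_type) {
      return (instance_type & (kIsNotStringMask | kIsInternalizedMask)) ==
             (kInternalizedTag | kStringTag);
    }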
1192 1220
1193 static void EmitCheckForTwoHeapNumbers(MacroAssembler* masm, 1221 static void EmitCheckForTwoHeapNumbers(MacroAssembler* masm,
1194 Register lhs, 1222 Register lhs,
1195 Register rhs, 1223 Register rhs,
1196 Label* both_loaded_as_doubles, 1224 Label* both_loaded_as_doubles,
1197 Label* not_heap_numbers, 1225 Label* not_heap_numbers,
1198 Label* slow) { 1226 Label* slow) {
1199 __ GetObjectType(lhs, a3, a2); 1227 __ GetObjectType(lhs, a3, a2);
(...skipping 13 matching lines...)
1213 1241
1214 // Fast negative check for internalized-to-internalized equality. 1242 // Fast negative check for internalized-to-internalized equality.
1215 static void EmitCheckForInternalizedStringsOrObjects(MacroAssembler* masm, 1243 static void EmitCheckForInternalizedStringsOrObjects(MacroAssembler* masm,
1216 Register lhs, 1244 Register lhs,
1217 Register rhs, 1245 Register rhs,
1218 Label* possible_strings, 1246 Label* possible_strings,
1219 Label* not_both_strings) { 1247 Label* not_both_strings) {
1220 ASSERT((lhs.is(a0) && rhs.is(a1)) || 1248 ASSERT((lhs.is(a0) && rhs.is(a1)) ||
1221 (lhs.is(a1) && rhs.is(a0))); 1249 (lhs.is(a1) && rhs.is(a0)));
1222 1250
1223 // a2 is object type of lhs. 1251 // a2 is object type of rhs.
1224 // Ensure that no non-strings have the internalized bit set.
1225 Label object_test; 1252 Label object_test;
1226 STATIC_ASSERT(kInternalizedTag != 0); 1253 STATIC_ASSERT(kInternalizedTag != 0);
1227 __ And(at, a2, Operand(kIsNotStringMask)); 1254 __ And(at, a2, Operand(kIsNotStringMask));
1228 __ Branch(&object_test, ne, at, Operand(zero_reg)); 1255 __ Branch(&object_test, ne, at, Operand(zero_reg));
1229 __ And(at, a2, Operand(kIsInternalizedMask)); 1256 __ And(at, a2, Operand(kIsInternalizedMask));
1230 __ Branch(possible_strings, eq, at, Operand(zero_reg)); 1257 __ Branch(possible_strings, eq, at, Operand(zero_reg));
1231 __ GetObjectType(rhs, a3, a3); 1258 __ GetObjectType(rhs, a3, a3);
1232 __ Branch(not_both_strings, ge, a3, Operand(FIRST_NONSTRING_TYPE)); 1259 __ Branch(not_both_strings, ge, a3, Operand(FIRST_NONSTRING_TYPE));
1233 __ And(at, a3, Operand(kIsInternalizedMask)); 1260 __ And(at, a3, Operand(kIsInternalizedMask));
1234 __ Branch(possible_strings, eq, at, Operand(zero_reg)); 1261 __ Branch(possible_strings, eq, at, Operand(zero_reg));
(...skipping 340 matching lines...)
1575 argument_count); 1602 argument_count);
1576 if (save_doubles_ == kSaveFPRegs) { 1603 if (save_doubles_ == kSaveFPRegs) {
1577 __ MultiPopFPU(kCallerSavedFPU); 1604 __ MultiPopFPU(kCallerSavedFPU);
1578 } 1605 }
1579 1606
1580 __ MultiPop(kJSCallerSaved | ra.bit()); 1607 __ MultiPop(kJSCallerSaved | ra.bit());
1581 __ Ret(); 1608 __ Ret();
1582 } 1609 }
1583 1610
1584 1611
1585 void UnaryOpStub::PrintName(StringStream* stream) {
1586 const char* op_name = Token::Name(op_);
1587 const char* overwrite_name = NULL; // Make g++ happy.
1588 switch (mode_) {
1589 case UNARY_NO_OVERWRITE: overwrite_name = "Alloc"; break;
1590 case UNARY_OVERWRITE: overwrite_name = "Overwrite"; break;
1591 }
1592 stream->Add("UnaryOpStub_%s_%s_%s",
1593 op_name,
1594 overwrite_name,
1595 UnaryOpIC::GetName(operand_type_));
1596 }
1597
1598
1599 // TODO(svenpanne): Use virtual functions instead of switch.
1600 void UnaryOpStub::Generate(MacroAssembler* masm) {
1601 switch (operand_type_) {
1602 case UnaryOpIC::UNINITIALIZED:
1603 GenerateTypeTransition(masm);
1604 break;
1605 case UnaryOpIC::SMI:
1606 GenerateSmiStub(masm);
1607 break;
1608 case UnaryOpIC::NUMBER:
1609 GenerateNumberStub(masm);
1610 break;
1611 case UnaryOpIC::GENERIC:
1612 GenerateGenericStub(masm);
1613 break;
1614 }
1615 }
1616
1617
1618 void UnaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
1619 // Argument is in a0 and v0 at this point, so we can overwrite a0.
1620 __ li(a2, Operand(Smi::FromInt(op_)));
1621 __ li(a1, Operand(Smi::FromInt(mode_)));
1622 __ li(a0, Operand(Smi::FromInt(operand_type_)));
1623 __ Push(v0, a2, a1, a0);
1624
1625 __ TailCallExternalReference(
1626 ExternalReference(IC_Utility(IC::kUnaryOp_Patch), masm->isolate()), 4, 1);
1627 }
1628
1629
1630 // TODO(svenpanne): Use virtual functions instead of switch.
1631 void UnaryOpStub::GenerateSmiStub(MacroAssembler* masm) {
1632 switch (op_) {
1633 case Token::SUB:
1634 GenerateSmiStubSub(masm);
1635 break;
1636 case Token::BIT_NOT:
1637 GenerateSmiStubBitNot(masm);
1638 break;
1639 default:
1640 UNREACHABLE();
1641 }
1642 }
1643
1644
1645 void UnaryOpStub::GenerateSmiStubSub(MacroAssembler* masm) {
1646 Label non_smi, slow;
1647 GenerateSmiCodeSub(masm, &non_smi, &slow);
1648 __ bind(&non_smi);
1649 __ bind(&slow);
1650 GenerateTypeTransition(masm);
1651 }
1652
1653
1654 void UnaryOpStub::GenerateSmiStubBitNot(MacroAssembler* masm) {
1655 Label non_smi;
1656 GenerateSmiCodeBitNot(masm, &non_smi);
1657 __ bind(&non_smi);
1658 GenerateTypeTransition(masm);
1659 }
1660
1661
1662 void UnaryOpStub::GenerateSmiCodeSub(MacroAssembler* masm,
1663 Label* non_smi,
1664 Label* slow) {
1665 __ JumpIfNotSmi(a0, non_smi);
1666
1667 // The result of negating zero or the smallest negative smi is not a smi.
1668 __ And(t0, a0, ~0x80000000);
1669 __ Branch(slow, eq, t0, Operand(zero_reg));
1670
1671 // Return '0 - value'.
1672 __ Ret(USE_DELAY_SLOT);
1673 __ subu(v0, zero_reg, a0);
1674 }
1675
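GenerateSmiCodeSub above relies on a small bit trick: clearing the sign bit of the tagged value yields zero only for tagged 0 and for 0x80000000, the tagged smallest smi, and neither can be negated within smi range (-0 is not a smi, and negating the minimum overflows). A sketch, assuming 32-bit smis tagged as value << 1:

    static bool SmiNegateFastPath(int32_t tagged, int32_t* result) {
      // Zero after masking off the sign bit means tagged 0 or the smallest
      // smi; both must take the slow path.
      if ((tagged & 0x7FFFFFFF) == 0) return false;
      // Negating the tagged value negates the smi (the tag bit stays 0).
      *result = 0 - tagged;
      return true;
    }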
1676
1677 void UnaryOpStub::GenerateSmiCodeBitNot(MacroAssembler* masm,
1678 Label* non_smi) {
1679 __ JumpIfNotSmi(a0, non_smi);
1680
1681 // Flip all bits, then clear the tag bit that the flip set.
1682 __ Neg(v0, a0);
1683 __ And(v0, v0, ~kSmiTagMask);
1684 __ Ret();
1685 }
1686
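The bit-not fast path never untags: flipping every bit of a tagged smi sets the (previously zero) tag bit, and clearing it again leaves exactly the smi encoding of the complement, since ~(x << 1) == (~x << 1) | 1. A sketch under the same tagging assumption (kSmiTagMask == 1):

    static int32_t SmiBitNot(int32_t tagged) {
      return ~tagged & ~1;  // flip all bits, then clear the tag bit
    }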
1687
1688 // TODO(svenpanne): Use virtual functions instead of switch.
1689 void UnaryOpStub::GenerateNumberStub(MacroAssembler* masm) {
1690 switch (op_) {
1691 case Token::SUB:
1692 GenerateNumberStubSub(masm);
1693 break;
1694 case Token::BIT_NOT:
1695 GenerateNumberStubBitNot(masm);
1696 break;
1697 default:
1698 UNREACHABLE();
1699 }
1700 }
1701
1702
1703 void UnaryOpStub::GenerateNumberStubSub(MacroAssembler* masm) {
1704 Label non_smi, slow, call_builtin;
1705 GenerateSmiCodeSub(masm, &non_smi, &call_builtin);
1706 __ bind(&non_smi);
1707 GenerateHeapNumberCodeSub(masm, &slow);
1708 __ bind(&slow);
1709 GenerateTypeTransition(masm);
1710 __ bind(&call_builtin);
1711 GenerateGenericCodeFallback(masm);
1712 }
1713
1714
1715 void UnaryOpStub::GenerateNumberStubBitNot(MacroAssembler* masm) {
1716 Label non_smi, slow;
1717 GenerateSmiCodeBitNot(masm, &non_smi);
1718 __ bind(&non_smi);
1719 GenerateHeapNumberCodeBitNot(masm, &slow);
1720 __ bind(&slow);
1721 GenerateTypeTransition(masm);
1722 }
1723
1724
1725 void UnaryOpStub::GenerateHeapNumberCodeSub(MacroAssembler* masm,
1726 Label* slow) {
1727 EmitCheckForHeapNumber(masm, a0, a1, t2, slow);
1728 // a0 is a heap number. Get a new heap number in a1.
1729 if (mode_ == UNARY_OVERWRITE) {
1730 __ lw(a2, FieldMemOperand(a0, HeapNumber::kExponentOffset));
1731 __ Xor(a2, a2, Operand(HeapNumber::kSignMask)); // Flip sign.
1732 __ Ret(USE_DELAY_SLOT);
1733 __ sw(a2, FieldMemOperand(a0, HeapNumber::kExponentOffset));
1734 } else {
1735 Label slow_allocate_heapnumber, heapnumber_allocated;
1736 __ AllocateHeapNumber(a1, a2, a3, t2, &slow_allocate_heapnumber);
1737 __ jmp(&heapnumber_allocated);
1738
1739 __ bind(&slow_allocate_heapnumber);
1740 {
1741 FrameScope scope(masm, StackFrame::INTERNAL);
1742 __ push(a0);
1743 __ CallRuntime(Runtime::kNumberAlloc, 0);
1744 __ mov(a1, v0);
1745 __ pop(a0);
1746 }
1747
1748 __ bind(&heapnumber_allocated);
1749 __ lw(a3, FieldMemOperand(a0, HeapNumber::kMantissaOffset));
1750 __ lw(a2, FieldMemOperand(a0, HeapNumber::kExponentOffset));
1751 __ sw(a3, FieldMemOperand(a1, HeapNumber::kMantissaOffset));
1752 __ Xor(a2, a2, Operand(HeapNumber::kSignMask)); // Flip sign.
1753 __ sw(a2, FieldMemOperand(a1, HeapNumber::kExponentOffset));
1754 __ Ret(USE_DELAY_SLOT);
1755 __ mov(v0, a1);
1756 }
1757 }
1758
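GenerateHeapNumberCodeSub negates a double without any FPU work: it XORs HeapNumber::kSignMask into the word holding the IEEE-754 sign bit (the "exponent" word). A host-side sketch of the same bit flip, not V8 code:

    #include <cstdint>
    #include <cstring>

    static double NegateBySignFlip(double d) {
      uint64_t bits;
      std::memcpy(&bits, &d, sizeof bits);
      bits ^= 0x8000000000000000ull;  // sign bit lives in the high word
      std::memcpy(&d, &bits, sizeof d);
      return d;
    }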
1759
1760 void UnaryOpStub::GenerateHeapNumberCodeBitNot(
1761 MacroAssembler* masm,
1762 Label* slow) {
1763 Label impossible;
1764
1765 EmitCheckForHeapNumber(masm, a0, a1, t2, slow);
1766 // Convert the heap number in a0 to an untagged integer in a1.
1767 __ ConvertToInt32(a0, a1, a2, a3, f0, slow);
1768
1769 // Do the bitwise operation and check if the result fits in a smi.
1770 Label try_float;
1771 __ Neg(a1, a1);
1772 __ Addu(a2, a1, Operand(0x40000000));
1773 __ Branch(&try_float, lt, a2, Operand(zero_reg));
1774
1775 // Tag the result as a smi and we're done.
1776 __ Ret(USE_DELAY_SLOT); // SmiTag emits one instruction in delay slot.
1777 __ SmiTag(v0, a1);
1778
1779 // Try to store the result in a heap number.
1780 __ bind(&try_float);
1781 if (mode_ == UNARY_NO_OVERWRITE) {
1782 Label slow_allocate_heapnumber, heapnumber_allocated;
1783 // Allocate a new heap number without zapping v0, which we need if it fails.
1784 __ AllocateHeapNumber(a2, a3, t0, t2, &slow_allocate_heapnumber);
1785 __ jmp(&heapnumber_allocated);
1786
1787 __ bind(&slow_allocate_heapnumber);
1788 {
1789 FrameScope scope(masm, StackFrame::INTERNAL);
1790 __ push(v0); // Push the heap number, not the untagged int32.
1791 __ CallRuntime(Runtime::kNumberAlloc, 0);
1792 __ mov(a2, v0); // Move the new heap number into a2.
1793 // Get the heap number into v0, now that the new heap number is in a2.
1794 __ pop(v0);
1795 }
1796
1797 // Convert the heap number in v0 to an untagged integer in a1.
1798 // This can't take the slow case because it's the same number we
1799 // already converted once.
1800 __ ConvertToInt32(v0, a1, a3, t0, f0, &impossible);
1801 // Re-apply the bitwise NOT to the reconverted value.
1802 __ Xor(a1, a1, -1);
1803
1804 __ bind(&heapnumber_allocated);
1805 __ mov(v0, a2); // Move newly allocated heap number to v0.
1806 }
1807
1808 // Convert the int32 in a1 to the heap number in v0. a2 is corrupted.
1809 __ mtc1(a1, f0);
1810 __ cvt_d_w(f0, f0);
1811 __ sdc1(f0, FieldMemOperand(v0, HeapNumber::kValueOffset));
1812 __ Ret();
1813
1814 __ bind(&impossible);
1815 if (FLAG_debug_code) {
1816 __ stop("Incorrect assumption in bit-not stub");
1817 }
1818 }
1819
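The smi range check in the bit-not path (Addu of 0x40000000 followed by a branch on the sign) works because a 31-bit smi payload must lie in [-2^30, 2^30 - 1]; adding 2^30 maps that range onto [0, 0x7FFFFFFF], so the sum is negative exactly when the value does not fit. A sketch (unsigned arithmetic avoids signed-overflow pitfalls):

    static bool FitsInSmi(int32_t value) {
      uint32_t shifted = static_cast<uint32_t>(value) + 0x40000000u;
      return static_cast<int32_t>(shifted) >= 0;
    }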
1820
1821 // TODO(svenpanne): Use virtual functions instead of switch.
1822 void UnaryOpStub::GenerateGenericStub(MacroAssembler* masm) {
1823 switch (op_) {
1824 case Token::SUB:
1825 GenerateGenericStubSub(masm);
1826 break;
1827 case Token::BIT_NOT:
1828 GenerateGenericStubBitNot(masm);
1829 break;
1830 default:
1831 UNREACHABLE();
1832 }
1833 }
1834
1835
1836 void UnaryOpStub::GenerateGenericStubSub(MacroAssembler* masm) {
1837 Label non_smi, slow;
1838 GenerateSmiCodeSub(masm, &non_smi, &slow);
1839 __ bind(&non_smi);
1840 GenerateHeapNumberCodeSub(masm, &slow);
1841 __ bind(&slow);
1842 GenerateGenericCodeFallback(masm);
1843 }
1844
1845
1846 void UnaryOpStub::GenerateGenericStubBitNot(MacroAssembler* masm) {
1847 Label non_smi, slow;
1848 GenerateSmiCodeBitNot(masm, &non_smi);
1849 __ bind(&non_smi);
1850 GenerateHeapNumberCodeBitNot(masm, &slow);
1851 __ bind(&slow);
1852 GenerateGenericCodeFallback(masm);
1853 }
1854
1855
1856 void UnaryOpStub::GenerateGenericCodeFallback(
1857 MacroAssembler* masm) {
1858 // Handle the slow case by jumping to the JavaScript builtin.
1859 __ push(a0);
1860 switch (op_) {
1861 case Token::SUB:
1862 __ InvokeBuiltin(Builtins::UNARY_MINUS, JUMP_FUNCTION);
1863 break;
1864 case Token::BIT_NOT:
1865 __ InvokeBuiltin(Builtins::BIT_NOT, JUMP_FUNCTION);
1866 break;
1867 default:
1868 UNREACHABLE();
1869 }
1870 }
1871
1872
1873 void BinaryOpStub::Initialize() { 1612 void BinaryOpStub::Initialize() {
1874 platform_specific_bit_ = true; // FPU is a base requirement for V8. 1613 platform_specific_bit_ = true; // FPU is a base requirement for V8.
1875 } 1614 }
1876 1615
1877 1616
1878 void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) { 1617 void BinaryOpStub::GenerateTypeTransition(MacroAssembler* masm) {
1879 Label get_result; 1618 Label get_result;
1880 1619
1881 __ Push(a1, a0); 1620 __ Push(a1, a0);
1882 1621
(...skipping 1636 matching lines...)
3519 3258
3520 void CEntryStub::Generate(MacroAssembler* masm) { 3259 void CEntryStub::Generate(MacroAssembler* masm) {
3521 // Called from JavaScript; parameters are on stack as if calling JS function 3260 // Called from JavaScript; parameters are on stack as if calling JS function
3522 // s0: number of arguments including receiver 3261 // s0: number of arguments including receiver
3523 // s1: size of arguments excluding receiver 3262 // s1: size of arguments excluding receiver
3524 // s2: pointer to builtin function 3263 // s2: pointer to builtin function
3525 // fp: frame pointer (restored after C call) 3264 // fp: frame pointer (restored after C call)
3526 // sp: stack pointer (restored as callee's sp after C call) 3265 // sp: stack pointer (restored as callee's sp after C call)
3527 // cp: current context (C callee-saved) 3266 // cp: current context (C callee-saved)
3528 3267
3268 ProfileEntryHookStub::MaybeCallEntryHook(masm);
3269
3529 // NOTE: Invocations of builtins may return failure objects 3270 // NOTE: Invocations of builtins may return failure objects
3530 // instead of a proper result. The builtin entry handles 3271 // instead of a proper result. The builtin entry handles
3531 // this by performing a garbage collection and retrying the 3272 // this by performing a garbage collection and retrying the
3532 // builtin once. 3273 // builtin once.
3533 3274
3534 // NOTE: s0-s2 hold the arguments of this function instead of a0-a2. 3275 // NOTE: s0-s2 hold the arguments of this function instead of a0-a2.
3535 // The reason for this is that these arguments would need to be saved anyway 3276 // The reason for this is that these arguments would need to be saved anyway
3536 // so it's faster to set them up directly. 3277 // so it's faster to set them up directly.
3537 // See MacroAssembler::PrepareCEntryArgs and PrepareCEntryFunction. 3278 // See MacroAssembler::PrepareCEntryArgs and PrepareCEntryFunction.
3538 3279
(...skipping 73 matching lines...)
3612 // Registers: 3353 // Registers:
3613 // a0: entry address 3354 // a0: entry address
3614 // a1: function 3355 // a1: function
3615 // a2: receiver 3356 // a2: receiver
3616 // a3: argc 3357 // a3: argc
3617 // 3358 //
3618 // Stack: 3359 // Stack:
3619 // 4 args slots 3360 // 4 args slots
3620 // args 3361 // args
3621 3362
3363 ProfileEntryHookStub::MaybeCallEntryHook(masm);
3364
3622 // Save callee saved registers on the stack. 3365 // Save callee saved registers on the stack.
3623 __ MultiPush(kCalleeSaved | ra.bit()); 3366 __ MultiPush(kCalleeSaved | ra.bit());
3624 3367
3625 // Save callee-saved FPU registers. 3368 // Save callee-saved FPU registers.
3626 __ MultiPushFPU(kCalleeSavedFPU); 3369 __ MultiPushFPU(kCalleeSavedFPU);
3627 // Set up the reserved register for 0.0. 3370 // Set up the reserved register for 0.0.
3628 __ Move(kDoubleRegZero, 0.0); 3371 __ Move(kDoubleRegZero, 0.0);
3629 3372
3630 3373
3631 // Load argv in s0 register. 3374 // Load argv in s0 register.
(...skipping 1402 matching lines...)
5034 4777
5035 // A monomorphic cache hit or an already megamorphic state: invoke the 4778 // A monomorphic cache hit or an already megamorphic state: invoke the
5036 // function without changing the state. 4779 // function without changing the state.
5037 __ Branch(&done, eq, a3, Operand(a1)); 4780 __ Branch(&done, eq, a3, Operand(a1));
5038 __ LoadRoot(at, Heap::kUndefinedValueRootIndex); 4781 __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
5039 __ Branch(&done, eq, a3, Operand(at)); 4782 __ Branch(&done, eq, a3, Operand(at));
5040 4783
5041 // Special handling of the Array() function, which caches not only the 4784 // Special handling of the Array() function, which caches not only the
5042 // monomorphic Array function but the initial ElementsKind with special 4785 // monomorphic Array function but the initial ElementsKind with special
5043 // sentinels 4786 // sentinels
5044 Handle<Object> terminal_kind_sentinel =
5045 TypeFeedbackCells::MonomorphicArraySentinel(masm->isolate(),
5046 LAST_FAST_ELEMENTS_KIND);
5047 __ JumpIfNotSmi(a3, &miss); 4787 __ JumpIfNotSmi(a3, &miss);
5048 __ Branch(&miss, gt, a3, Operand(terminal_kind_sentinel)); 4788 if (FLAG_debug_code) {
4789 Handle<Object> terminal_kind_sentinel =
4790 TypeFeedbackCells::MonomorphicArraySentinel(masm->isolate(),
4791 LAST_FAST_ELEMENTS_KIND);
4792 __ Assert(le, "Array function sentinel is not an ElementsKind",
4793 a3, Operand(terminal_kind_sentinel));
4794 }
4795
5049 // Make sure the function is the Array() function 4796 // Make sure the function is the Array() function
5050 __ LoadArrayFunction(a3); 4797 __ LoadArrayFunction(a3);
5051 __ Branch(&megamorphic, ne, a1, Operand(a3)); 4798 __ Branch(&megamorphic, ne, a1, Operand(a3));
5052 __ jmp(&done); 4799 __ jmp(&done);
5053 4800
5054 __ bind(&miss); 4801 __ bind(&miss);
5055 4802
5056 // A monomorphic miss (i.e, here the cache is not uninitialized) goes 4803 // A monomorphic miss (i.e, here the cache is not uninitialized) goes
5057 // megamorphic. 4804 // megamorphic.
5058 __ LoadRoot(at, Heap::kTheHoleValueRootIndex); 4805 __ LoadRoot(at, Heap::kTheHoleValueRootIndex);
(...skipping 1546 matching lines...)
6605 6352
6606 // Check that both operands are heap objects. 6353 // Check that both operands are heap objects.
6607 __ JumpIfEitherSmi(left, right, &miss); 6354 __ JumpIfEitherSmi(left, right, &miss);
6608 6355
6609 // Check that both operands are internalized strings. 6356 // Check that both operands are internalized strings.
6610 __ lw(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); 6357 __ lw(tmp1, FieldMemOperand(left, HeapObject::kMapOffset));
6611 __ lw(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); 6358 __ lw(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
6612 __ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); 6359 __ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset));
6613 __ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); 6360 __ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset));
6614 STATIC_ASSERT(kInternalizedTag != 0); 6361 STATIC_ASSERT(kInternalizedTag != 0);
6615 __ And(tmp1, tmp1, Operand(tmp2)); 6362
6616 __ And(tmp1, tmp1, kIsInternalizedMask); 6363 __ And(tmp1, tmp1, Operand(kIsNotStringMask | kIsInternalizedMask));
6617 __ Branch(&miss, eq, tmp1, Operand(zero_reg)); 6364 __ Branch(&miss, ne, tmp1, Operand(kInternalizedTag | kStringTag));
6365
6366 __ And(tmp2, tmp2, Operand(kIsNotStringMask | kIsInternalizedMask));
6367 __ Branch(&miss, ne, tmp2, Operand(kInternalizedTag | kStringTag));
6368
6618 // Make sure a0 is non-zero. At this point input operands are 6369 // Make sure a0 is non-zero. At this point input operands are
6619 // guaranteed to be non-zero. 6370 // guaranteed to be non-zero.
6620 ASSERT(right.is(a0)); 6371 ASSERT(right.is(a0));
6621 STATIC_ASSERT(EQUAL == 0); 6372 STATIC_ASSERT(EQUAL == 0);
6622 STATIC_ASSERT(kSmiTag == 0); 6373 STATIC_ASSERT(kSmiTag == 0);
6623 __ mov(v0, right); 6374 __ mov(v0, right);
6624 // Internalized strings are compared by identity. 6375 // Internalized strings are compared by identity.
6625 __ Ret(ne, left, Operand(right)); 6376 __ Ret(ne, left, Operand(right));
6626 ASSERT(is_int16(EQUAL)); 6377 ASSERT(is_int16(EQUAL));
6627 __ Ret(USE_DELAY_SLOT); 6378 __ Ret(USE_DELAY_SLOT);
(...skipping 19 matching lines...)
6647 __ JumpIfEitherSmi(left, right, &miss); 6398 __ JumpIfEitherSmi(left, right, &miss);
6648 6399
6649 // Check that both operands are unique names. This leaves the instance 6400 // Check that both operands are unique names. This leaves the instance
6650 // types loaded in tmp1 and tmp2. 6401 // types loaded in tmp1 and tmp2.
6651 STATIC_ASSERT(kInternalizedTag != 0); 6402 STATIC_ASSERT(kInternalizedTag != 0);
6652 __ lw(tmp1, FieldMemOperand(left, HeapObject::kMapOffset)); 6403 __ lw(tmp1, FieldMemOperand(left, HeapObject::kMapOffset));
6653 __ lw(tmp2, FieldMemOperand(right, HeapObject::kMapOffset)); 6404 __ lw(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
6654 __ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset)); 6405 __ lbu(tmp1, FieldMemOperand(tmp1, Map::kInstanceTypeOffset));
6655 __ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset)); 6406 __ lbu(tmp2, FieldMemOperand(tmp2, Map::kInstanceTypeOffset));
6656 6407
6657 Label succeed1; 6408 __ JumpIfNotUniqueName(tmp1, &miss);
6658 __ And(at, tmp1, Operand(kIsInternalizedMask)); 6409 __ JumpIfNotUniqueName(tmp2, &miss);
6659 __ Branch(&succeed1, ne, at, Operand(zero_reg));
6660 __ Branch(&miss, ne, tmp1, Operand(SYMBOL_TYPE));
6661 __ bind(&succeed1);
6662
6663 Label succeed2;
6664 __ And(at, tmp2, Operand(kIsInternalizedMask));
6665 __ Branch(&succeed2, ne, at, Operand(zero_reg));
6666 __ Branch(&miss, ne, tmp2, Operand(SYMBOL_TYPE));
6667 __ bind(&succeed2);
6668 6410
6669 // Use a0 as result 6411 // Use a0 as result
6670 __ mov(v0, a0); 6412 __ mov(v0, a0);
6671 6413
6672 // Unique names are compared by identity. 6414 // Unique names are compared by identity.
6673 Label done; 6415 Label done;
6674 __ Branch(&done, ne, left, Operand(right)); 6416 __ Branch(&done, ne, left, Operand(right));
6675 // Make sure a0 is non-zero. At this point input operands are 6417 // Make sure a0 is non-zero. At this point input operands are
6676 // guaranteed to be non-zero. 6418 // guaranteed to be non-zero.
6677 ASSERT(right.is(a0)); 6419 ASSERT(right.is(a0));
(...skipping 42 matching lines...)
6720 STATIC_ASSERT(EQUAL == 0); 6462 STATIC_ASSERT(EQUAL == 0);
6721 STATIC_ASSERT(kSmiTag == 0); 6463 STATIC_ASSERT(kSmiTag == 0);
6722 __ Branch(&left_ne_right, ne, left, Operand(right)); 6464 __ Branch(&left_ne_right, ne, left, Operand(right));
6723 __ Ret(USE_DELAY_SLOT); 6465 __ Ret(USE_DELAY_SLOT);
6724 __ mov(v0, zero_reg); // In the delay slot. 6466 __ mov(v0, zero_reg); // In the delay slot.
6725 __ bind(&left_ne_right); 6467 __ bind(&left_ne_right);
6726 6468
6727 // Handle not identical strings. 6469 // Handle not identical strings.
6728 6470
6729 // Check that both strings are internalized strings. If they are, we're done 6471 // Check that both strings are internalized strings. If they are, we're done
6730 // because we already know they are not identical. 6472 // because we already know they are not identical. We know they are both
6473 // strings.
6731 if (equality) { 6474 if (equality) {
6732 ASSERT(GetCondition() == eq); 6475 ASSERT(GetCondition() == eq);
6733 STATIC_ASSERT(kInternalizedTag != 0); 6476 STATIC_ASSERT(kInternalizedTag != 0);
6734 __ And(tmp3, tmp1, Operand(tmp2)); 6477 __ And(tmp3, tmp1, Operand(tmp2));
6735 __ And(tmp5, tmp3, Operand(kIsInternalizedMask)); 6478 __ And(tmp5, tmp3, Operand(kIsInternalizedMask));
6736 Label is_symbol; 6479 Label is_symbol;
6737 __ Branch(&is_symbol, eq, tmp5, Operand(zero_reg)); 6480 __ Branch(&is_symbol, eq, tmp5, Operand(zero_reg));
6738 // Make sure a0 is non-zero. At this point input operands are 6481 // Make sure a0 is non-zero. At this point input operands are
6739 // guaranteed to be non-zero. 6482 // guaranteed to be non-zero.
6740 ASSERT(right.is(a0)); 6483 ASSERT(right.is(a0));
(...skipping 59 matching lines...)
6800 __ Branch(&miss, ne, a2, Operand(known_map_)); 6543 __ Branch(&miss, ne, a2, Operand(known_map_));
6801 __ Branch(&miss, ne, a3, Operand(known_map_)); 6544 __ Branch(&miss, ne, a3, Operand(known_map_));
6802 6545
6803 __ Ret(USE_DELAY_SLOT); 6546 __ Ret(USE_DELAY_SLOT);
6804 __ subu(v0, a0, a1); 6547 __ subu(v0, a0, a1);
6805 6548
6806 __ bind(&miss); 6549 __ bind(&miss);
6807 GenerateMiss(masm); 6550 GenerateMiss(masm);
6808 } 6551 }
6809 6552
6553
6810 void ICCompareStub::GenerateMiss(MacroAssembler* masm) { 6554 void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
6811 { 6555 {
6812 // Call the runtime system in a fresh internal frame. 6556 // Call the runtime system in a fresh internal frame.
6813 ExternalReference miss = 6557 ExternalReference miss =
6814 ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate()); 6558 ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
6815 FrameScope scope(masm, StackFrame::INTERNAL); 6559 FrameScope scope(masm, StackFrame::INTERNAL);
6816 __ Push(a1, a0); 6560 __ Push(a1, a0);
6817 __ push(ra); 6561 __ push(ra);
6818 __ Push(a1, a0); 6562 __ Push(a1, a0);
6819 __ li(t0, Operand(Smi::FromInt(op_))); 6563 __ li(t0, Operand(Smi::FromInt(op_)));
(...skipping 105 matching lines...)
6925 // Stop if found the property. 6669 // Stop if found the property.
6926 __ Branch(miss, eq, entity_name, Operand(Handle<Name>(name))); 6670 __ Branch(miss, eq, entity_name, Operand(Handle<Name>(name)));
6927 6671
6928 Label good; 6672 Label good;
6929 __ Branch(&good, eq, entity_name, Operand(tmp)); 6673 __ Branch(&good, eq, entity_name, Operand(tmp));
6930 6674
6931 // Check if the entry name is not a unique name. 6675 // Check if the entry name is not a unique name.
6932 __ lw(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset)); 6676 __ lw(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset));
6933 __ lbu(entity_name, 6677 __ lbu(entity_name,
6934 FieldMemOperand(entity_name, Map::kInstanceTypeOffset)); 6678 FieldMemOperand(entity_name, Map::kInstanceTypeOffset));
6935 __ And(scratch0, entity_name, Operand(kIsInternalizedMask)); 6679 __ JumpIfNotUniqueName(entity_name, miss);
6936 __ Branch(&good, ne, scratch0, Operand(zero_reg));
6937 __ Branch(miss, ne, entity_name, Operand(SYMBOL_TYPE));
6938
6939 __ bind(&good); 6680 __ bind(&good);
6940 6681
6941 // Restore the properties. 6682 // Restore the properties.
6942 __ lw(properties, 6683 __ lw(properties,
6943 FieldMemOperand(receiver, JSObject::kPropertiesOffset)); 6684 FieldMemOperand(receiver, JSObject::kPropertiesOffset));
6944 } 6685 }
6945 6686
6946 const int spill_mask = 6687 const int spill_mask =
6947 (ra.bit() | t2.bit() | t1.bit() | t0.bit() | a3.bit() | 6688 (ra.bit() | t2.bit() | t1.bit() | t0.bit() | a3.bit() |
6948 a2.bit() | a1.bit() | a0.bit() | v0.bit()); 6689 a2.bit() | a1.bit() | a0.bit() | v0.bit());
(...skipping 153 matching lines...)
7102 __ lw(entry_key, FieldMemOperand(index, kElementsStartOffset)); 6843 __ lw(entry_key, FieldMemOperand(index, kElementsStartOffset));
7103 6844
7104 // Having undefined at this place means the name is not contained. 6845 // Having undefined at this place means the name is not contained.
7105 __ Branch(&not_in_dictionary, eq, entry_key, Operand(undefined)); 6846 __ Branch(&not_in_dictionary, eq, entry_key, Operand(undefined));
7106 6847
7107 // Stop if found the property. 6848 // Stop if found the property.
7108 __ Branch(&in_dictionary, eq, entry_key, Operand(key)); 6849 __ Branch(&in_dictionary, eq, entry_key, Operand(key));
7109 6850
7110 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) { 6851 if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) {
7111 // Check if the entry name is not a unique name. 6852 // Check if the entry name is not a unique name.
7112 Label cont;
7113 __ lw(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset)); 6853 __ lw(entry_key, FieldMemOperand(entry_key, HeapObject::kMapOffset));
7114 __ lbu(entry_key, 6854 __ lbu(entry_key,
7115 FieldMemOperand(entry_key, Map::kInstanceTypeOffset)); 6855 FieldMemOperand(entry_key, Map::kInstanceTypeOffset));
7116 __ And(result, entry_key, Operand(kIsInternalizedMask)); 6856 __ JumpIfNotUniqueName(entry_key, &maybe_in_dictionary);
7117 __ Branch(&cont, ne, result, Operand(zero_reg));
7118 __ Branch(&maybe_in_dictionary, ne, entry_key, Operand(SYMBOL_TYPE));
7119 __ bind(&cont);
7120 } 6857 }
7121 } 6858 }
7122 6859
7123 __ bind(&maybe_in_dictionary); 6860 __ bind(&maybe_in_dictionary);
7124 // If we are doing negative lookup then probing failure should be 6861 // If we are doing negative lookup then probing failure should be
7125 // treated as a lookup success. For positive lookup probing failure 6862 // treated as a lookup success. For positive lookup probing failure
7126 // should be treated as lookup failure. 6863 // should be treated as lookup failure.
7127 if (mode_ == POSITIVE_LOOKUP) { 6864 if (mode_ == POSITIVE_LOOKUP) {
7128 __ Ret(USE_DELAY_SLOT); 6865 __ Ret(USE_DELAY_SLOT);
7129 __ mov(result, zero_reg); 6866 __ mov(result, zero_reg);
7130 } 6867 }
7131 6868
7132 __ bind(&in_dictionary); 6869 __ bind(&in_dictionary);
7133 __ Ret(USE_DELAY_SLOT); 6870 __ Ret(USE_DELAY_SLOT);
7134 __ li(result, 1); 6871 __ li(result, 1);
7135 6872
7136 __ bind(&not_in_dictionary); 6873 __ bind(&not_in_dictionary);
7137 __ Ret(USE_DELAY_SLOT); 6874 __ Ret(USE_DELAY_SLOT);
7138 __ mov(result, zero_reg); 6875 __ mov(result, zero_reg);
7139 } 6876 }
7140 6877
7141 6878
7142 struct AheadOfTimeWriteBarrierStubList { 6879 struct AheadOfTimeWriteBarrierStubList {
7143 Register object, value, address; 6880 Register object, value, address;
7144 RememberedSetAction action; 6881 RememberedSetAction action;
7145 }; 6882 };
7146 6883
6884
7147 #define REG(Name) { kRegister_ ## Name ## _Code } 6885 #define REG(Name) { kRegister_ ## Name ## _Code }
7148 6886
7149 static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = { 6887 static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
7150 // Used in RegExpExecStub. 6888 // Used in RegExpExecStub.
7151 { REG(s2), REG(s0), REG(t3), EMIT_REMEMBERED_SET }, 6889 { REG(s2), REG(s0), REG(t3), EMIT_REMEMBERED_SET },
7152 // Used in CompileArrayPushCall. 6890 // Used in CompileArrayPushCall.
7153 // Also used in StoreIC::GenerateNormal via GenerateDictionaryStore. 6891 // Also used in StoreIC::GenerateNormal via GenerateDictionaryStore.
7154 // Also used in KeyedStoreIC::GenerateGeneric. 6892 // Also used in KeyedStoreIC::GenerateGeneric.
7155 { REG(a3), REG(t0), REG(t1), EMIT_REMEMBERED_SET }, 6893 { REG(a3), REG(t0), REG(t1), EMIT_REMEMBERED_SET },
7156 // Used in CompileStoreGlobal. 6894 // Used in CompileStoreGlobal.
(...skipping 349 matching lines...)
7506 __ Addu(a1, a1, Operand(1)); 7244 __ Addu(a1, a1, Operand(1));
7507 } 7245 }
7508 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE); 7246 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
7509 __ sll(a1, a1, kPointerSizeLog2); 7247 __ sll(a1, a1, kPointerSizeLog2);
7510 __ Ret(USE_DELAY_SLOT); 7248 __ Ret(USE_DELAY_SLOT);
7511 __ Addu(sp, sp, a1); 7249 __ Addu(sp, sp, a1);
7512 } 7250 }
7513 7251
7514 7252
7515 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) { 7253 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
7516 if (entry_hook_ != NULL) { 7254 if (masm->isolate()->function_entry_hook() != NULL) {
7255 AllowStubCallsScope allow_stub_calls(masm, true);
7517 ProfileEntryHookStub stub; 7256 ProfileEntryHookStub stub;
7518 __ push(ra); 7257 __ push(ra);
7519 __ CallStub(&stub); 7258 __ CallStub(&stub);
7520 __ pop(ra); 7259 __ pop(ra);
7521 } 7260 }
7522 } 7261 }
7523 7262
7524 7263
7525 void ProfileEntryHookStub::Generate(MacroAssembler* masm) { 7264 void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
7526 // The entry hook is a "push ra" instruction, followed by a call. 7265 // The entry hook is a "push ra" instruction, followed by a call.
7527 // Note: on MIPS a "push" is two instructions. 7266 // Note: on MIPS a "push" is two instructions.
7528 const int32_t kReturnAddressDistanceFromFunctionStart = 7267 const int32_t kReturnAddressDistanceFromFunctionStart =
7529 Assembler::kCallTargetAddressOffset + (2 * Assembler::kInstrSize); 7268 Assembler::kCallTargetAddressOffset + (2 * Assembler::kInstrSize);
7530 7269
7531 // Save live volatile registers. 7270 // This should contain all kJSCallerSaved registers.
7532 __ Push(ra, t1, a1); 7271 const RegList kSavedRegs =
7533 const int32_t kNumSavedRegs = 3; 7272 kJSCallerSaved | // Caller saved registers.
7273 s5.bit(); // Saved stack pointer.
7274
7275 // We also save ra, so the count here is one higher than the mask indicates.
7276 const int32_t kNumSavedRegs = kNumJSCallerSaved + 2;
7277
7278 // Save all caller-save registers as this may be called from anywhere.
7279 __ MultiPush(kSavedRegs | ra.bit());
7534 7280
7535 // Compute the function's address for the first argument. 7281 // Compute the function's address for the first argument.
7536 __ Subu(a0, ra, Operand(kReturnAddressDistanceFromFunctionStart)); 7282 __ Subu(a0, ra, Operand(kReturnAddressDistanceFromFunctionStart));
7537 7283
7538 // The caller's return address is above the saved temporaries. 7284 // The caller's return address is above the saved temporaries.
7539 // Grab that for the second argument to the hook. 7285 // Grab that for the second argument to the hook.
7540 __ Addu(a1, sp, Operand(kNumSavedRegs * kPointerSize)); 7286 __ Addu(a1, sp, Operand(kNumSavedRegs * kPointerSize));
7541 7287
7542 // Align the stack if necessary. 7288 // Align the stack if necessary.
7543 int frame_alignment = masm->ActivationFrameAlignment(); 7289 int frame_alignment = masm->ActivationFrameAlignment();
7544 if (frame_alignment > kPointerSize) { 7290 if (frame_alignment > kPointerSize) {
7545 __ mov(t1, sp); 7291 __ mov(s5, sp);
7546 ASSERT(IsPowerOf2(frame_alignment)); 7292 ASSERT(IsPowerOf2(frame_alignment));
7547 __ And(sp, sp, Operand(-frame_alignment)); 7293 __ And(sp, sp, Operand(-frame_alignment));
7548 } 7294 }
7549 7295
7550 #if defined(V8_HOST_ARCH_MIPS) 7296 #if defined(V8_HOST_ARCH_MIPS)
7551 __ li(at, Operand(reinterpret_cast<int32_t>(&entry_hook_))); 7297 int32_t entry_hook =
7552 __ lw(at, MemOperand(at)); 7298 reinterpret_cast<int32_t>(masm->isolate()->function_entry_hook());
7299 __ li(at, Operand(entry_hook));
7553 #else 7300 #else
7554 // Under the simulator we need to indirect the entry hook through a 7301 // Under the simulator we need to indirect the entry hook through a
7555 // trampoline function at a known address. 7302 // trampoline function at a known address.
7556 Address trampoline_address = reinterpret_cast<Address>( 7303 ApiFunction dispatcher(FUNCTION_ADDR(EntryHookTrampoline));
7557 reinterpret_cast<intptr_t>(EntryHookTrampoline));
7558 ApiFunction dispatcher(trampoline_address);
7559 __ li(at, Operand(ExternalReference(&dispatcher, 7304 __ li(at, Operand(ExternalReference(&dispatcher,
7560 ExternalReference::BUILTIN_CALL, 7305 ExternalReference::BUILTIN_CALL,
7561 masm->isolate()))); 7306 masm->isolate())));
7562 #endif 7307 #endif
7563 __ Call(at); 7308 __ Call(at);
7564 7309
7565 // Restore the stack pointer if needed. 7310 // Restore the stack pointer if needed.
7566 if (frame_alignment > kPointerSize) { 7311 if (frame_alignment > kPointerSize) {
7567 __ mov(sp, t1); 7312 __ mov(sp, s5);
7568 } 7313 }
7569 7314
7570 __ Pop(ra, t1, a1); 7315 // Also pop ra to get Ret(0).
7316 __ MultiPop(kSavedRegs | ra.bit());
7571 __ Ret(); 7317 __ Ret();
7572 } 7318 }
7573 7319
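The alignment dance in ProfileEntryHookStub::Generate saves the incoming sp in s5, rounds sp down to the platform's activation frame alignment before the C call, and restores it afterwards. The rounding itself is the usual power-of-two mask, sketched here:

    static uintptr_t AlignStackDown(uintptr_t sp, uintptr_t alignment) {
      // alignment must be a power of two, mirroring the IsPowerOf2 ASSERT.
      return sp & ~(alignment - 1);
    }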
7574 7320
7575 template<class T> 7321 template<class T>
7576 static void CreateArrayDispatch(MacroAssembler* masm) { 7322 static void CreateArrayDispatch(MacroAssembler* masm) {
7577 int last_index = GetSequenceIndexFromFastElementsKind( 7323 int last_index = GetSequenceIndexFromFastElementsKind(
7578 TERMINAL_FAST_ELEMENTS_KIND); 7324 TERMINAL_FAST_ELEMENTS_KIND);
7579 for (int i = 0; i <= last_index; ++i) { 7325 for (int i = 0; i <= last_index; ++i) {
7580 Label next; 7326 Label next;
(...skipping 33 matching lines...)
7614 7360
7615 // look at the first argument 7361 // look at the first argument
7616 __ lw(t1, MemOperand(sp, 0)); 7362 __ lw(t1, MemOperand(sp, 0));
7617 __ Branch(&normal_sequence, eq, t1, Operand(zero_reg)); 7363 __ Branch(&normal_sequence, eq, t1, Operand(zero_reg));
7618 7364
7619 // We are going to create a holey array, but our kind is non-holey. 7365 // We are going to create a holey array, but our kind is non-holey.
7620 // Fix kind and retry 7366 // Fix kind and retry
7621 __ Addu(a3, a3, Operand(1)); 7367 __ Addu(a3, a3, Operand(1));
7622 __ Branch(&normal_sequence, eq, a2, Operand(undefined_sentinel)); 7368 __ Branch(&normal_sequence, eq, a2, Operand(undefined_sentinel));
7623 7369
7370 // The type cell may have gone megamorphic, don't overwrite if so.
7371 __ lw(t1, FieldMemOperand(a2, kPointerSize));
7372 __ JumpIfNotSmi(t1, &normal_sequence);
7373
7624 // Save the resulting elements kind in type info 7374 // Save the resulting elements kind in type info
7625 __ SmiTag(a3); 7375 __ SmiTag(a3);
7626 __ sw(a3, FieldMemOperand(a2, kPointerSize)); 7376 __ sw(a3, FieldMemOperand(a2, kPointerSize));
7627 __ SmiUntag(a3); 7377 __ SmiUntag(a3);
7628 7378
7629 __ bind(&normal_sequence); 7379 __ bind(&normal_sequence);
7630 int last_index = GetSequenceIndexFromFastElementsKind( 7380 int last_index = GetSequenceIndexFromFastElementsKind(
7631 TERMINAL_FAST_ELEMENTS_KIND); 7381 TERMINAL_FAST_ELEMENTS_KIND);
7632 for (int i = 0; i <= last_index; ++i) { 7382 for (int i = 0; i <= last_index; ++i) {
7633 Label next; 7383 Label next;
(...skipping 11 matching lines...)
7645 7395
7646 template<class T> 7396 template<class T>
7647 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) { 7397 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
7648 int to_index = GetSequenceIndexFromFastElementsKind( 7398 int to_index = GetSequenceIndexFromFastElementsKind(
7649 TERMINAL_FAST_ELEMENTS_KIND); 7399 TERMINAL_FAST_ELEMENTS_KIND);
7650 for (int i = 0; i <= to_index; ++i) { 7400 for (int i = 0; i <= to_index; ++i) {
7651 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i); 7401 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
7652 T stub(kind); 7402 T stub(kind);
7653 stub.GetCode(isolate)->set_is_pregenerated(true); 7403 stub.GetCode(isolate)->set_is_pregenerated(true);
7654 if (AllocationSiteInfo::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) { 7404 if (AllocationSiteInfo::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
7655 T stub1(kind, true); 7405 T stub1(kind, CONTEXT_CHECK_REQUIRED, DISABLE_ALLOCATION_SITES);
7656 stub1.GetCode(isolate)->set_is_pregenerated(true); 7406 stub1.GetCode(isolate)->set_is_pregenerated(true);
7657 } 7407 }
7658 } 7408 }
7659 } 7409 }
7660 7410
7661 7411
7662 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) { 7412 void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
7663 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>( 7413 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
7664 isolate); 7414 isolate);
7665 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>( 7415 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
(...skipping 170 matching lines...)
7836 __ bind(&fast_elements_case); 7586 __ bind(&fast_elements_case);
7837 GenerateCase(masm, FAST_ELEMENTS); 7587 GenerateCase(masm, FAST_ELEMENTS);
7838 } 7588 }
7839 7589
7840 7590
7841 #undef __ 7591 #undef __
7842 7592
7843 } } // namespace v8::internal 7593 } } // namespace v8::internal
7844 7594
7845 #endif // V8_TARGET_ARCH_MIPS 7595 #endif // V8_TARGET_ARCH_MIPS