OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_MIPS | 5 #if V8_TARGET_ARCH_MIPS |
6 | 6 |
7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
8 #include "src/bootstrapper.h" | 8 #include "src/bootstrapper.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 1573 matching lines...)
1584 LoadWithVectorDescriptor::SlotRegister())); | 1584 LoadWithVectorDescriptor::SlotRegister())); |
1585 | 1585 |
1586 NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, t0, | 1586 NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, t0, |
1587 t1, &miss); | 1587 t1, &miss); |
1588 __ bind(&miss); | 1588 __ bind(&miss); |
1589 PropertyAccessCompiler::TailCallBuiltin( | 1589 PropertyAccessCompiler::TailCallBuiltin( |
1590 masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC)); | 1590 masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC)); |
1591 } | 1591 } |
1592 | 1592 |
1593 | 1593 |
| 1594 void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) { |
| 1595 // a1 : function |
| 1596 // a2 : number of parameters (tagged) |
| 1597 // a3 : parameters pointer |
| 1598 |
| 1599 DCHECK(a1.is(ArgumentsAccessNewDescriptor::function())); |
| 1600 DCHECK(a2.is(ArgumentsAccessNewDescriptor::parameter_count())); |
| 1601 DCHECK(a3.is(ArgumentsAccessNewDescriptor::parameter_pointer())); |
| 1602 |
| 1603 // Check if the calling frame is an arguments adaptor frame. |
| 1604 Label runtime; |
| 1605 __ lw(t0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
| 1606 __ lw(a0, MemOperand(t0, StandardFrameConstants::kContextOffset)); |
| 1607 __ Branch(&runtime, ne, a0, |
| 1608 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
| 1609 |
| 1610 // Patch the arguments.length and the parameters pointer in the current frame. |
| 1611 __ lw(a2, MemOperand(t0, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 1612 __ Lsa(t0, t0, a2, 1); |
| 1613 __ addiu(a3, t0, StandardFrameConstants::kCallerSPOffset); |
| 1614 |
| 1615 __ bind(&runtime); |
| 1616 __ Push(a1, a3, a2); |
| 1617 __ TailCallRuntime(Runtime::kNewSloppyArguments); |
| 1618 } |
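A note on the address arithmetic above (used again in GenerateNewSloppyFast below): the length in a2 is smi-tagged, and on 32-bit V8 a smi stores its value shifted left by one while kPointerSize is 4, so shifting the tagged count left by one more bit (the Lsa ..., 1 step) gives the byte size of that many pointer-sized slots. A minimal C++ sketch of the parameter-pointer computation under those two assumptions; ParameterPointer is an invented name, not a V8 helper:

// Sketch only: mirrors "Lsa(t0, t0, a2, 1)" followed by the addiu of
// StandardFrameConstants::kCallerSPOffset.
intptr_t ParameterPointer(intptr_t adaptor_fp, intptr_t length_smi) {
  // length_smi == length << 1, so one more shift gives length * kPointerSize.
  intptr_t byte_offset = length_smi << 1;
  return adaptor_fp + byte_offset + StandardFrameConstants::kCallerSPOffset;
}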
| 1619 |
| 1620 |
| 1621 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { |
| 1622 // a1 : function |
| 1623 // a2 : number of parameters (tagged) |
| 1624 // a3 : parameters pointer |
| 1625 // Registers used over whole function: |
| 1626 // t1 : arguments count (tagged) |
| 1627 // t2 : mapped parameter count (tagged) |
| 1628 |
| 1629 DCHECK(a1.is(ArgumentsAccessNewDescriptor::function())); |
| 1630 DCHECK(a2.is(ArgumentsAccessNewDescriptor::parameter_count())); |
| 1631 DCHECK(a3.is(ArgumentsAccessNewDescriptor::parameter_pointer())); |
| 1632 |
| 1633 // Check if the calling frame is an arguments adaptor frame. |
| 1634 Label adaptor_frame, try_allocate, runtime; |
| 1635 __ lw(t0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
| 1636 __ lw(a0, MemOperand(t0, StandardFrameConstants::kContextOffset)); |
| 1637 __ Branch(&adaptor_frame, eq, a0, |
| 1638 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
| 1639 |
| 1640 // No adaptor, parameter count = argument count. |
| 1641 __ mov(t1, a2); |
| 1642 __ Branch(USE_DELAY_SLOT, &try_allocate); |
| 1643 __ mov(t2, a2); // In delay slot. |
| 1644 |
| 1645 // We have an adaptor frame. Patch the parameters pointer. |
| 1646 __ bind(&adaptor_frame); |
| 1647 __ lw(t1, MemOperand(t0, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 1648 __ Lsa(t0, t0, t1, 1); |
| 1649 __ Addu(a3, t0, Operand(StandardFrameConstants::kCallerSPOffset)); |
| 1650 |
| 1651 // t1 = argument count (tagged) |
| 1652 // t2 = parameter count (tagged) |
| 1653 // Compute the mapped parameter count = min(t2, t1) in t2. |
| 1654 __ mov(t2, a2); |
| 1655 __ Branch(&try_allocate, le, t2, Operand(t1)); |
| 1656 __ mov(t2, t1); |
| 1657 |
| 1658 __ bind(&try_allocate); |
| 1659 |
| 1660 // Compute the sizes of backing store, parameter map, and arguments object. |
| 1661 // 1. Parameter map: has 2 extra words containing context and backing store. |
| 1662 const int kParameterMapHeaderSize = |
| 1663 FixedArray::kHeaderSize + 2 * kPointerSize; |
| 1664 // If there are no mapped parameters, we do not need the parameter_map. |
| 1665 Label param_map_size; |
| 1666 DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0)); |
| 1667 __ Branch(USE_DELAY_SLOT, ¶m_map_size, eq, t2, Operand(zero_reg)); |
| 1668 __ mov(t5, zero_reg); // In delay slot: param map size = 0 when t2 == 0. |
| 1669 __ sll(t5, t2, 1); |
| 1670 __ addiu(t5, t5, kParameterMapHeaderSize); |
| 1671 __ bind(¶m_map_size); |
| 1672 |
| 1673 // 2. Backing store. |
| 1674 __ Lsa(t5, t5, t1, 1); |
| 1675 __ Addu(t5, t5, Operand(FixedArray::kHeaderSize)); |
| 1676 |
| 1677 // 3. Arguments object. |
| 1678 __ Addu(t5, t5, Operand(JSSloppyArgumentsObject::kSize)); |
| 1679 |
| 1680 // Do the allocation of all three objects in one go. |
| 1681 __ Allocate(t5, v0, t5, t0, &runtime, TAG_OBJECT); |
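The byte size accumulated in t5 before the Allocate call can be written out directly. The sketch below is only an illustration (the helper name is invented, and the counts are untagged here whereas the stub keeps them smi-tagged); it uses the same V8-internal constants the stub references:

// Hypothetical helper mirroring the size the stub computes in t5.
int SloppyArgumentsAllocationSize(int argument_count, int mapped_count) {
  const int kParameterMapHeaderSize =
      FixedArray::kHeaderSize + 2 * kPointerSize;
  int size = 0;
  if (mapped_count != 0) {
    // 1. Parameter map: header (context and backing store words) plus one
    //    slot per mapped parameter.
    size = kParameterMapHeaderSize + mapped_count * kPointerSize;
  }
  // 2. Backing store: a FixedArray with one slot per actual argument.
  size += FixedArray::kHeaderSize + argument_count * kPointerSize;
  // 3. The JSSloppyArgumentsObject itself.
  size += JSSloppyArgumentsObject::kSize;
  return size;
}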
| 1682 |
| 1683 // v0 = address of new object(s) (tagged) |
| 1684 // a2 = argument count (smi-tagged) |
| 1685 // Get the arguments boilerplate from the current native context into t0. |
| 1686 const int kNormalOffset = |
| 1687 Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX); |
| 1688 const int kAliasedOffset = |
| 1689 Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX); |
| 1690 |
| 1691 __ lw(t0, NativeContextMemOperand()); |
| 1692 Label skip2_ne, skip2_eq; |
| 1693 __ Branch(&skip2_ne, ne, t2, Operand(zero_reg)); |
| 1694 __ lw(t0, MemOperand(t0, kNormalOffset)); |
| 1695 __ bind(&skip2_ne); |
| 1696 |
| 1697 __ Branch(&skip2_eq, eq, t2, Operand(zero_reg)); |
| 1698 __ lw(t0, MemOperand(t0, kAliasedOffset)); |
| 1699 __ bind(&skip2_eq); |
| 1700 |
| 1701 // v0 = address of new object (tagged) |
| 1702 // a2 = argument count (smi-tagged) |
| 1703 // t0 = address of arguments map (tagged) |
| 1704 // t2 = mapped parameter count (tagged) |
| 1705 __ sw(t0, FieldMemOperand(v0, JSObject::kMapOffset)); |
| 1706 __ LoadRoot(t5, Heap::kEmptyFixedArrayRootIndex); |
| 1707 __ sw(t5, FieldMemOperand(v0, JSObject::kPropertiesOffset)); |
| 1708 __ sw(t5, FieldMemOperand(v0, JSObject::kElementsOffset)); |
| 1709 |
| 1710 // Set up the callee in-object property. |
| 1711 __ AssertNotSmi(a1); |
| 1712 __ sw(a1, FieldMemOperand(v0, JSSloppyArgumentsObject::kCalleeOffset)); |
| 1713 |
| 1714 // Use the length (smi tagged) and set that as an in-object property too. |
| 1715 __ AssertSmi(t1); |
| 1716 __ sw(t1, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset)); |
| 1717 |
| 1718 // Set up the elements pointer in the allocated arguments object. |
| 1719 // If we allocated a parameter map, t0 will point there, otherwise |
| 1720 // it will point to the backing store. |
| 1721 __ Addu(t0, v0, Operand(JSSloppyArgumentsObject::kSize)); |
| 1722 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset)); |
| 1723 |
| 1724 // v0 = address of new object (tagged) |
| 1725 // a2 = argument count (tagged) |
| 1726 // t0 = address of parameter map or backing store (tagged) |
| 1727 // t2 = mapped parameter count (tagged) |
| 1728 // Initialize parameter map. If there are no mapped arguments, we're done. |
| 1729 Label skip_parameter_map; |
| 1730 Label skip3; |
| 1731 __ Branch(&skip3, ne, t2, Operand(Smi::FromInt(0))); |
| 1732 // Move backing store address to a1, because it is |
| 1733 // expected there when filling in the unmapped arguments. |
| 1734 __ mov(a1, t0); |
| 1735 __ bind(&skip3); |
| 1736 |
| 1737 __ Branch(&skip_parameter_map, eq, t2, Operand(Smi::FromInt(0))); |
| 1738 |
| 1739 __ LoadRoot(t1, Heap::kSloppyArgumentsElementsMapRootIndex); |
| 1740 __ sw(t1, FieldMemOperand(t0, FixedArray::kMapOffset)); |
| 1741 __ Addu(t1, t2, Operand(Smi::FromInt(2))); |
| 1742 __ sw(t1, FieldMemOperand(t0, FixedArray::kLengthOffset)); |
| 1743 __ sw(cp, FieldMemOperand(t0, FixedArray::kHeaderSize + 0 * kPointerSize)); |
| 1744 __ Lsa(t1, t0, t2, 1); |
| 1745 __ Addu(t1, t1, Operand(kParameterMapHeaderSize)); |
| 1746 __ sw(t1, FieldMemOperand(t0, FixedArray::kHeaderSize + 1 * kPointerSize)); |
| 1747 |
| 1748 // Copy the parameter slots and the holes in the arguments. |
| 1749 // We need to fill in mapped_parameter_count slots. They index the context, |
| 1750 // where parameters are stored in reverse order, at |
| 1751 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1 |
| 1752 // The mapped parameters thus need to get indices |
| 1753 // MIN_CONTEXT_SLOTS+parameter_count-1 .. |
| 1754 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count |
| 1755 // We loop from right to left. |
| 1756 Label parameters_loop, parameters_test; |
| 1757 __ mov(t1, t2); |
| 1758 __ Addu(t5, a2, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS))); |
| 1759 __ Subu(t5, t5, Operand(t2)); |
| 1760 __ LoadRoot(t3, Heap::kTheHoleValueRootIndex); |
| 1761 __ Lsa(a1, t0, t1, 1); |
| 1762 __ Addu(a1, a1, Operand(kParameterMapHeaderSize)); |
| 1763 |
| 1764 // a1 = address of backing store (tagged) |
| 1765 // t0 = address of parameter map (tagged) |
| 1766 // a0 = temporary scratch (among others, for address calculation) |
| 1767 // t1 = loop variable (tagged) |
| 1768 // t3 = the hole value |
| 1769 __ jmp(¶meters_test); |
| 1770 |
| 1771 __ bind(¶meters_loop); |
| 1772 __ Subu(t1, t1, Operand(Smi::FromInt(1))); |
| 1773 __ sll(a0, t1, 1); |
| 1774 __ Addu(a0, a0, Operand(kParameterMapHeaderSize - kHeapObjectTag)); |
| 1775 __ Addu(t6, t0, a0); |
| 1776 __ sw(t5, MemOperand(t6)); |
| 1777 __ Subu(a0, a0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize)); |
| 1778 __ Addu(t6, a1, a0); |
| 1779 __ sw(t3, MemOperand(t6)); |
| 1780 __ Addu(t5, t5, Operand(Smi::FromInt(1))); |
| 1781 __ bind(¶meters_test); |
| 1782 __ Branch(¶meters_loop, ne, t1, Operand(Smi::FromInt(0))); |
| 1783 |
| 1784 // t1 = argument count (tagged). |
| 1785 __ lw(t1, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset)); |
| 1786 |
| 1787 __ bind(&skip_parameter_map); |
| 1788 // v0 = address of new object (tagged) |
| 1789 // a1 = address of backing store (tagged) |
| 1790 // t1 = argument count (tagged) |
| 1791 // t2 = mapped parameter count (tagged) |
| 1792 // t5 = scratch |
| 1793 // Copy arguments header and remaining slots (if there are any). |
| 1794 __ LoadRoot(t5, Heap::kFixedArrayMapRootIndex); |
| 1795 __ sw(t5, FieldMemOperand(a1, FixedArray::kMapOffset)); |
| 1796 __ sw(t1, FieldMemOperand(a1, FixedArray::kLengthOffset)); |
| 1797 |
| 1798 Label arguments_loop, arguments_test; |
| 1799 __ sll(t6, t2, 1); |
| 1800 __ Subu(a3, a3, Operand(t6)); |
| 1801 __ jmp(&arguments_test); |
| 1802 |
| 1803 __ bind(&arguments_loop); |
| 1804 __ Subu(a3, a3, Operand(kPointerSize)); |
| 1805 __ lw(t0, MemOperand(a3, 0)); |
| 1806 __ Lsa(t5, a1, t2, 1); |
| 1807 __ sw(t0, FieldMemOperand(t5, FixedArray::kHeaderSize)); |
| 1808 __ Addu(t2, t2, Operand(Smi::FromInt(1))); |
| 1809 |
| 1810 __ bind(&arguments_test); |
| 1811 __ Branch(&arguments_loop, lt, t2, Operand(t1)); |
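The remaining, unmapped arguments are then copied from the caller's parameter area into the backing store. A rough C++ equivalent of the pointer walk in arguments_loop, again with invented names and untagged counts:

// a3 is rewound by mapped_count slots before the loop; each iteration then
// pre-decrements it, loads one argument, and stores it at the next element
// index, starting at mapped_count.
void CopyUnmappedArguments(intptr_t* parameters,      // a3: parameters pointer
                           intptr_t* backing_store,   // a1: FixedArray elements
                           int argument_count, int mapped_count) {
  parameters -= mapped_count;
  for (int i = mapped_count; i < argument_count; ++i) {
    backing_store[i] = *--parameters;
  }
}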
| 1812 |
| 1813 // Return. |
| 1814 __ Ret(); |
| 1815 |
| 1816 // Do the runtime call to allocate the arguments object. |
| 1817 // t1 = argument count (tagged) |
| 1818 __ bind(&runtime); |
| 1819 __ Push(a1, a3, t1); |
| 1820 __ TailCallRuntime(Runtime::kNewSloppyArguments); |
| 1821 } |
| 1822 |
| 1823 |
1594 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) { | 1824 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) { |
1595 // Return address is in ra. | 1825 // Return address is in ra. |
1596 Label slow; | 1826 Label slow; |
1597 | 1827 |
1598 Register receiver = LoadDescriptor::ReceiverRegister(); | 1828 Register receiver = LoadDescriptor::ReceiverRegister(); |
1599 Register key = LoadDescriptor::NameRegister(); | 1829 Register key = LoadDescriptor::NameRegister(); |
1600 | 1830 |
1601 // Check that the key is an array index, that is Uint32. | 1831 // Check that the key is an array index, that is Uint32. |
1602 __ And(t0, key, Operand(kSmiTagMask | kSmiSignMask)); | 1832 __ And(t0, key, Operand(kSmiTagMask | kSmiSignMask)); |
1603 __ Branch(&slow, ne, t0, Operand(zero_reg)); | 1833 __ Branch(&slow, ne, t0, Operand(zero_reg)); |
(...skipping 3300 matching lines...)
4904 __ SmiTag(a1); | 5134 __ SmiTag(a1); |
4905 __ Push(a0, a2, a1); | 5135 __ Push(a0, a2, a1); |
4906 __ CallRuntime(Runtime::kAllocateInNewSpace); | 5136 __ CallRuntime(Runtime::kAllocateInNewSpace); |
4907 __ Pop(a0, a2); | 5137 __ Pop(a0, a2); |
4908 } | 5138 } |
4909 __ jmp(&done_allocate); | 5139 __ jmp(&done_allocate); |
4910 } | 5140 } |
4911 } | 5141 } |
4912 | 5142 |
4913 | 5143 |
4914 void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) { | |
4915 // ----------- S t a t e ------------- | |
4916 // -- a1 : function | |
4917 // -- cp : context | |
4918 // -- fp : frame pointer | |
4919 // -- ra : return address | |
4920 // ----------------------------------- | |
4921 __ AssertFunction(a1); | |
4922 | |
4923 // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub. | |
4924 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); | |
4925 __ lw(a2, | |
4926 FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset)); | |
4927 __ Lsa(a3, fp, a2, kPointerSizeLog2 - 1); | |
4928 __ Addu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset)); | |
4929 | |
4930 // a1 : function | |
4931 // a2 : number of parameters (tagged) | |
4932 // a3 : parameters pointer | |
4933 // Registers used over whole function: | |
4934 // t1 : arguments count (tagged) | |
4935 // t2 : mapped parameter count (tagged) | |
4936 | |
4937 DCHECK(a1.is(ArgumentsAccessNewDescriptor::function())); | |
4938 DCHECK(a2.is(ArgumentsAccessNewDescriptor::parameter_count())); | |
4939 DCHECK(a3.is(ArgumentsAccessNewDescriptor::parameter_pointer())); | |
4940 | |
4941 // Check if the calling frame is an arguments adaptor frame. | |
4942 Label adaptor_frame, try_allocate, runtime; | |
4943 __ lw(t0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | |
4944 __ lw(a0, MemOperand(t0, StandardFrameConstants::kContextOffset)); | |
4945 __ Branch(&adaptor_frame, eq, a0, | |
4946 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | |
4947 | |
4948 // No adaptor, parameter count = argument count. | |
4949 __ mov(t1, a2); | |
4950 __ Branch(USE_DELAY_SLOT, &try_allocate); | |
4951 __ mov(t2, a2); // In delay slot. | |
4952 | |
4953 // We have an adaptor frame. Patch the parameters pointer. | |
4954 __ bind(&adaptor_frame); | |
4955 __ lw(t1, MemOperand(t0, ArgumentsAdaptorFrameConstants::kLengthOffset)); | |
4956 __ Lsa(t0, t0, t1, 1); | |
4957 __ Addu(a3, t0, Operand(StandardFrameConstants::kCallerSPOffset)); | |
4958 | |
4959 // t1 = argument count (tagged) | |
4960 // t2 = parameter count (tagged) | |
4961 // Compute the mapped parameter count = min(t2, t1) in t2. | |
4962 __ mov(t2, a2); | |
4963 __ Branch(&try_allocate, le, t2, Operand(t1)); | |
4964 __ mov(t2, t1); | |
4965 | |
4966 __ bind(&try_allocate); | |
4967 | |
4968 // Compute the sizes of backing store, parameter map, and arguments object. | |
4969 // 1. Parameter map: has 2 extra words containing context and backing store. | |
4970 const int kParameterMapHeaderSize = | |
4971 FixedArray::kHeaderSize + 2 * kPointerSize; | |
4972 // If there are no mapped parameters, we do not need the parameter_map. | |
4973 Label param_map_size; | |
4974 DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0)); | |
4975 __ Branch(USE_DELAY_SLOT, ¶m_map_size, eq, t2, Operand(zero_reg)); | |
4976 __ mov(t5, zero_reg); // In delay slot: param map size = 0 when t2 == 0. | |
4977 __ sll(t5, t2, 1); | |
4978 __ addiu(t5, t5, kParameterMapHeaderSize); | |
4979 __ bind(¶m_map_size); | |
4980 | |
4981 // 2. Backing store. | |
4982 __ Lsa(t5, t5, t1, 1); | |
4983 __ Addu(t5, t5, Operand(FixedArray::kHeaderSize)); | |
4984 | |
4985 // 3. Arguments object. | |
4986 __ Addu(t5, t5, Operand(JSSloppyArgumentsObject::kSize)); | |
4987 | |
4988 // Do the allocation of all three objects in one go. | |
4989 __ Allocate(t5, v0, t5, t0, &runtime, TAG_OBJECT); | |
4990 | |
4991 // v0 = address of new object(s) (tagged) | |
4992 // a2 = argument count (smi-tagged) | |
4993 // Get the arguments boilerplate from the current native context into t0. | |
4994 const int kNormalOffset = | |
4995 Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX); | |
4996 const int kAliasedOffset = | |
4997 Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX); | |
4998 | |
4999 __ lw(t0, NativeContextMemOperand()); | |
5000 Label skip2_ne, skip2_eq; | |
5001 __ Branch(&skip2_ne, ne, t2, Operand(zero_reg)); | |
5002 __ lw(t0, MemOperand(t0, kNormalOffset)); | |
5003 __ bind(&skip2_ne); | |
5004 | |
5005 __ Branch(&skip2_eq, eq, t2, Operand(zero_reg)); | |
5006 __ lw(t0, MemOperand(t0, kAliasedOffset)); | |
5007 __ bind(&skip2_eq); | |
5008 | |
5009 // v0 = address of new object (tagged) | |
5010 // a2 = argument count (smi-tagged) | |
5011 // t0 = address of arguments map (tagged) | |
5012 // t2 = mapped parameter count (tagged) | |
5013 __ sw(t0, FieldMemOperand(v0, JSObject::kMapOffset)); | |
5014 __ LoadRoot(t5, Heap::kEmptyFixedArrayRootIndex); | |
5015 __ sw(t5, FieldMemOperand(v0, JSObject::kPropertiesOffset)); | |
5016 __ sw(t5, FieldMemOperand(v0, JSObject::kElementsOffset)); | |
5017 | |
5018 // Set up the callee in-object property. | |
5019 __ AssertNotSmi(a1); | |
5020 __ sw(a1, FieldMemOperand(v0, JSSloppyArgumentsObject::kCalleeOffset)); | |
5021 | |
5022 // Use the length (smi tagged) and set that as an in-object property too. | |
5023 __ AssertSmi(t1); | |
5024 __ sw(t1, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset)); | |
5025 | |
5026 // Set up the elements pointer in the allocated arguments object. | |
5027 // If we allocated a parameter map, t0 will point there, otherwise | |
5028 // it will point to the backing store. | |
5029 __ Addu(t0, v0, Operand(JSSloppyArgumentsObject::kSize)); | |
5030 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset)); | |
5031 | |
5032 // v0 = address of new object (tagged) | |
5033 // a2 = argument count (tagged) | |
5034 // t0 = address of parameter map or backing store (tagged) | |
5035 // t2 = mapped parameter count (tagged) | |
5036 // Initialize parameter map. If there are no mapped arguments, we're done. | |
5037 Label skip_parameter_map; | |
5038 Label skip3; | |
5039 __ Branch(&skip3, ne, t2, Operand(Smi::FromInt(0))); | |
5040 // Move backing store address to a1, because it is | |
5041 // expected there when filling in the unmapped arguments. | |
5042 __ mov(a1, t0); | |
5043 __ bind(&skip3); | |
5044 | |
5045 __ Branch(&skip_parameter_map, eq, t2, Operand(Smi::FromInt(0))); | |
5046 | |
5047 __ LoadRoot(t1, Heap::kSloppyArgumentsElementsMapRootIndex); | |
5048 __ sw(t1, FieldMemOperand(t0, FixedArray::kMapOffset)); | |
5049 __ Addu(t1, t2, Operand(Smi::FromInt(2))); | |
5050 __ sw(t1, FieldMemOperand(t0, FixedArray::kLengthOffset)); | |
5051 __ sw(cp, FieldMemOperand(t0, FixedArray::kHeaderSize + 0 * kPointerSize)); | |
5052 __ Lsa(t1, t0, t2, 1); | |
5053 __ Addu(t1, t1, Operand(kParameterMapHeaderSize)); | |
5054 __ sw(t1, FieldMemOperand(t0, FixedArray::kHeaderSize + 1 * kPointerSize)); | |
5055 | |
5056 // Copy the parameter slots and the holes in the arguments. | |
5057 // We need to fill in mapped_parameter_count slots. They index the context, | |
5058 // where parameters are stored in reverse order, at | |
5059 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1 | |
5060 // The mapped parameters thus need to get indices | |
5061 // MIN_CONTEXT_SLOTS+parameter_count-1 .. | |
5062 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count | |
5063 // We loop from right to left. | |
5064 Label parameters_loop, parameters_test; | |
5065 __ mov(t1, t2); | |
5066 __ Addu(t5, a2, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS))); | |
5067 __ Subu(t5, t5, Operand(t2)); | |
5068 __ LoadRoot(t3, Heap::kTheHoleValueRootIndex); | |
5069 __ Lsa(a1, t0, t1, 1); | |
5070 __ Addu(a1, a1, Operand(kParameterMapHeaderSize)); | |
5071 | |
5072 // a1 = address of backing store (tagged) | |
5073 // t0 = address of parameter map (tagged) | |
5074 // a0 = temporary scratch (among others, for address calculation) | |
5075 // t1 = loop variable (tagged) | |
5076 // t3 = the hole value | |
5077 __ jmp(¶meters_test); | |
5078 | |
5079 __ bind(¶meters_loop); | |
5080 __ Subu(t1, t1, Operand(Smi::FromInt(1))); | |
5081 __ sll(a0, t1, 1); | |
5082 __ Addu(a0, a0, Operand(kParameterMapHeaderSize - kHeapObjectTag)); | |
5083 __ Addu(t6, t0, a0); | |
5084 __ sw(t5, MemOperand(t6)); | |
5085 __ Subu(a0, a0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize)); | |
5086 __ Addu(t6, a1, a0); | |
5087 __ sw(t3, MemOperand(t6)); | |
5088 __ Addu(t5, t5, Operand(Smi::FromInt(1))); | |
5089 __ bind(¶meters_test); | |
5090 __ Branch(¶meters_loop, ne, t1, Operand(Smi::FromInt(0))); | |
5091 | |
5092 // t1 = argument count (tagged). | |
5093 __ lw(t1, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset)); | |
5094 | |
5095 __ bind(&skip_parameter_map); | |
5096 // v0 = address of new object (tagged) | |
5097 // a1 = address of backing store (tagged) | |
5098 // t1 = argument count (tagged) | |
5099 // t2 = mapped parameter count (tagged) | |
5100 // t5 = scratch | |
5101 // Copy arguments header and remaining slots (if there are any). | |
5102 __ LoadRoot(t5, Heap::kFixedArrayMapRootIndex); | |
5103 __ sw(t5, FieldMemOperand(a1, FixedArray::kMapOffset)); | |
5104 __ sw(t1, FieldMemOperand(a1, FixedArray::kLengthOffset)); | |
5105 | |
5106 Label arguments_loop, arguments_test; | |
5107 __ sll(t6, t2, 1); | |
5108 __ Subu(a3, a3, Operand(t6)); | |
5109 __ jmp(&arguments_test); | |
5110 | |
5111 __ bind(&arguments_loop); | |
5112 __ Subu(a3, a3, Operand(kPointerSize)); | |
5113 __ lw(t0, MemOperand(a3, 0)); | |
5114 __ Lsa(t5, a1, t2, 1); | |
5115 __ sw(t0, FieldMemOperand(t5, FixedArray::kHeaderSize)); | |
5116 __ Addu(t2, t2, Operand(Smi::FromInt(1))); | |
5117 | |
5118 __ bind(&arguments_test); | |
5119 __ Branch(&arguments_loop, lt, t2, Operand(t1)); | |
5120 | |
5121 // Return. | |
5122 __ Ret(); | |
5123 | |
5124 // Do the runtime call to allocate the arguments object. | |
5125 // t1 = argument count (tagged) | |
5126 __ bind(&runtime); | |
5127 __ Push(a1, a3, t1); | |
5128 __ TailCallRuntime(Runtime::kNewSloppyArguments); | |
5129 } | |
5130 | |
5131 | |
5132 void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { | 5144 void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { |
5133 // ----------- S t a t e ------------- | 5145 // ----------- S t a t e ------------- |
5134 // -- a1 : function | 5146 // -- a1 : function |
5135 // -- cp : context | 5147 // -- cp : context |
5136 // -- fp : frame pointer | 5148 // -- fp : frame pointer |
5137 // -- ra : return address | 5149 // -- ra : return address |
5138 // ----------------------------------- | 5150 // ----------------------------------- |
5139 __ AssertFunction(a1); | 5151 __ AssertFunction(a1); |
5140 | 5152 |
5141 // For Ignition we need to skip all possible handler/stub frames until | 5153 // For Ignition we need to skip all possible handler/stub frames until |
(...skipping 545 matching lines...)
5687 return_value_operand, NULL); | 5699 return_value_operand, NULL); |
5688 } | 5700 } |
5689 | 5701 |
5690 | 5702 |
5691 #undef __ | 5703 #undef __ |
5692 | 5704 |
5693 } // namespace internal | 5705 } // namespace internal |
5694 } // namespace v8 | 5706 } // namespace v8 |
5695 | 5707 |
5696 #endif // V8_TARGET_ARCH_MIPS | 5708 #endif // V8_TARGET_ARCH_MIPS |