OLD | NEW |
1 // Copyright 2012 the V8 project authors. All rights reserved. | 1 // Copyright 2012 the V8 project authors. All rights reserved. |
2 // Use of this source code is governed by a BSD-style license that can be | 2 // Use of this source code is governed by a BSD-style license that can be |
3 // found in the LICENSE file. | 3 // found in the LICENSE file. |
4 | 4 |
5 #if V8_TARGET_ARCH_MIPS | 5 #if V8_TARGET_ARCH_MIPS |
6 | 6 |
7 #include "src/base/bits.h" | 7 #include "src/base/bits.h" |
8 #include "src/bootstrapper.h" | 8 #include "src/bootstrapper.h" |
9 #include "src/code-stubs.h" | 9 #include "src/code-stubs.h" |
10 #include "src/codegen.h" | 10 #include "src/codegen.h" |
(...skipping 1573 matching lines...)
1584 LoadWithVectorDescriptor::SlotRegister())); | 1584 LoadWithVectorDescriptor::SlotRegister())); |
1585 | 1585 |
1586 NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, t0, | 1586 NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, t0, |
1587 t1, &miss); | 1587 t1, &miss); |
1588 __ bind(&miss); | 1588 __ bind(&miss); |
1589 PropertyAccessCompiler::TailCallBuiltin( | 1589 PropertyAccessCompiler::TailCallBuiltin( |
1590 masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC)); | 1590 masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC)); |
1591 } | 1591 } |
1592 | 1592 |
1593 | 1593 |
1594 void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) { | |
1595 // a1 : function | |
1596 // a2 : number of parameters (tagged) | |
1597 // a3 : parameters pointer | |
1598 | |
1599 DCHECK(a1.is(ArgumentsAccessNewDescriptor::function())); | |
1600 DCHECK(a2.is(ArgumentsAccessNewDescriptor::parameter_count())); | |
1601 DCHECK(a3.is(ArgumentsAccessNewDescriptor::parameter_pointer())); | |
1602 | |
1603 // Check if the calling frame is an arguments adaptor frame. | |
1604 Label runtime; | |
1605 __ lw(t0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | |
1606 __ lw(a0, MemOperand(t0, StandardFrameConstants::kContextOffset)); | |
1607 __ Branch(&runtime, ne, a0, | |
1608 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | |
1609 | |
1610 // Patch the arguments.length and the parameters pointer in the current frame. | |
1611 __ lw(a2, MemOperand(t0, ArgumentsAdaptorFrameConstants::kLengthOffset)); | |
1612 __ Lsa(t0, t0, a2, 1); | |
1613 __ addiu(a3, t0, StandardFrameConstants::kCallerSPOffset); | |
1614 | |
1615 __ bind(&runtime); | |
1616 __ Push(a1, a3, a2); | |
1617 __ TailCallRuntime(Runtime::kNewSloppyArguments); | |
1618 } | |
1619 | |
1620 | |
1621 void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) { | |
1622 // a1 : function | |
1623 // a2 : number of parameters (tagged) | |
1624 // a3 : parameters pointer | |
1625 // Registers used over whole function: | |
1626 // t1 : arguments count (tagged) | |
1627 // t2 : mapped parameter count (tagged) | |
1628 | |
1629 DCHECK(a1.is(ArgumentsAccessNewDescriptor::function())); | |
1630 DCHECK(a2.is(ArgumentsAccessNewDescriptor::parameter_count())); | |
1631 DCHECK(a3.is(ArgumentsAccessNewDescriptor::parameter_pointer())); | |
1632 | |
1633 // Check if the calling frame is an arguments adaptor frame. | |
1634 Label adaptor_frame, try_allocate, runtime; | |
1635 __ lw(t0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); | |
1636 __ lw(a0, MemOperand(t0, StandardFrameConstants::kContextOffset)); | |
1637 __ Branch(&adaptor_frame, eq, a0, | |
1638 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); | |
1639 | |
1640 // No adaptor, parameter count = argument count. | |
1641 __ mov(t1, a2); | |
1642 __ Branch(USE_DELAY_SLOT, &try_allocate); | |
1643 __ mov(t2, a2); // In delay slot. | |
1644 | |
1645 // We have an adaptor frame. Patch the parameters pointer. | |
1646 __ bind(&adaptor_frame); | |
1647 __ lw(t1, MemOperand(t0, ArgumentsAdaptorFrameConstants::kLengthOffset)); | |
1648 __ Lsa(t0, t0, t1, 1); | |
1649 __ Addu(a3, t0, Operand(StandardFrameConstants::kCallerSPOffset)); | |
1650 | |
1651 // t1 = argument count (tagged) | |
1652 // t2 = parameter count (tagged) | |
1653 // Compute the mapped parameter count = min(t2, t1) in t2. | |
1654 __ mov(t2, a2); | |
1655 __ Branch(&try_allocate, le, t2, Operand(t1)); | |
1656 __ mov(t2, t1); | |
1657 | |
1658 __ bind(&try_allocate); | |
1659 | |
1660 // Compute the sizes of backing store, parameter map, and arguments object. | |
1661 // 1. Parameter map, has 2 extra words containing context and backing store. | |
1662 const int kParameterMapHeaderSize = | |
1663 FixedArray::kHeaderSize + 2 * kPointerSize; | |
1664 // If there are no mapped parameters, we do not need the parameter_map. | |
1665 Label param_map_size; | |
1666 DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0)); | |
1667 __ Branch(USE_DELAY_SLOT, ¶m_map_size, eq, t2, Operand(zero_reg)); | |
1668 __ mov(t5, zero_reg); // In delay slot: param map size = 0 when t2 == 0. | |
1669 __ sll(t5, t2, 1); | |
1670 __ addiu(t5, t5, kParameterMapHeaderSize); | |
1671 __ bind(¶m_map_size); | |
1672 | |
1673 // 2. Backing store. | |
1674 __ Lsa(t5, t5, t1, 1); | |
1675 __ Addu(t5, t5, Operand(FixedArray::kHeaderSize)); | |
1676 | |
1677 // 3. Arguments object. | |
1678 __ Addu(t5, t5, Operand(JSSloppyArgumentsObject::kSize)); | |
1679 | |
1680 // Do the allocation of all three objects in one go. | |
1681 __ Allocate(t5, v0, t5, t0, &runtime, TAG_OBJECT); | |
1682 | |
1683 // v0 = address of new object(s) (tagged) | |
1684 // a2 = argument count (smi-tagged) | |
1685 // Get the arguments boilerplate from the current native context into t0. | |
1686 const int kNormalOffset = | |
1687 Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX); | |
1688 const int kAliasedOffset = | |
1689 Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX); | |
1690 | |
1691 __ lw(t0, NativeContextMemOperand()); | |
1692 Label skip2_ne, skip2_eq; | |
1693 __ Branch(&skip2_ne, ne, t2, Operand(zero_reg)); | |
1694 __ lw(t0, MemOperand(t0, kNormalOffset)); | |
1695 __ bind(&skip2_ne); | |
1696 | |
1697 __ Branch(&skip2_eq, eq, t2, Operand(zero_reg)); | |
1698 __ lw(t0, MemOperand(t0, kAliasedOffset)); | |
1699 __ bind(&skip2_eq); | |
1700 | |
1701 // v0 = address of new object (tagged) | |
1702 // a2 = argument count (smi-tagged) | |
1703 // t0 = address of arguments map (tagged) | |
1704 // t2 = mapped parameter count (tagged) | |
1705 __ sw(t0, FieldMemOperand(v0, JSObject::kMapOffset)); | |
1706 __ LoadRoot(t5, Heap::kEmptyFixedArrayRootIndex); | |
1707 __ sw(t5, FieldMemOperand(v0, JSObject::kPropertiesOffset)); | |
1708 __ sw(t5, FieldMemOperand(v0, JSObject::kElementsOffset)); | |
1709 | |
1710 // Set up the callee in-object property. | |
1711 __ AssertNotSmi(a1); | |
1712 __ sw(a1, FieldMemOperand(v0, JSSloppyArgumentsObject::kCalleeOffset)); | |
1713 | |
1714 // Use the length (smi tagged) and set that as an in-object property too. | |
1715 __ AssertSmi(t1); | |
1716 __ sw(t1, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset)); | |
1717 | |
1718 // Set up the elements pointer in the allocated arguments object. | |
1719 // If we allocated a parameter map, t0 will point there, otherwise | |
1720 // it will point to the backing store. | |
1721 __ Addu(t0, v0, Operand(JSSloppyArgumentsObject::kSize)); | |
1722 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset)); | |
1723 | |
1724 // v0 = address of new object (tagged) | |
1725 // a2 = argument count (tagged) | |
1726 // t0 = address of parameter map or backing store (tagged) | |
1727 // t2 = mapped parameter count (tagged) | |
1728 // Initialize parameter map. If there are no mapped arguments, we're done. | |
1729 Label skip_parameter_map; | |
1730 Label skip3; | |
1731 __ Branch(&skip3, ne, t2, Operand(Smi::FromInt(0))); | |
1732 // Move backing store address to a1, because it is | |
1733 // expected there when filling in the unmapped arguments. | |
1734 __ mov(a1, t0); | |
1735 __ bind(&skip3); | |
1736 | |
1737 __ Branch(&skip_parameter_map, eq, t2, Operand(Smi::FromInt(0))); | |
1738 | |
1739 __ LoadRoot(t1, Heap::kSloppyArgumentsElementsMapRootIndex); | |
1740 __ sw(t1, FieldMemOperand(t0, FixedArray::kMapOffset)); | |
1741 __ Addu(t1, t2, Operand(Smi::FromInt(2))); | |
1742 __ sw(t1, FieldMemOperand(t0, FixedArray::kLengthOffset)); | |
1743 __ sw(cp, FieldMemOperand(t0, FixedArray::kHeaderSize + 0 * kPointerSize)); | |
1744 __ Lsa(t1, t0, t2, 1); | |
1745 __ Addu(t1, t1, Operand(kParameterMapHeaderSize)); | |
1746 __ sw(t1, FieldMemOperand(t0, FixedArray::kHeaderSize + 1 * kPointerSize)); | |
1747 | |
1748 // Copy the parameter slots and the holes in the arguments. | |
1749 // We need to fill in mapped_parameter_count slots. They index the context, | |
1750 // where parameters are stored in reverse order, at | |
1751 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1 | |
1752 // The mapped parameter thus need to get indices | |
1753 // MIN_CONTEXT_SLOTS+parameter_count-1 .. | |
1754 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count | |
1755 // We loop from right to left. | |
1756 Label parameters_loop, parameters_test; | |
1757 __ mov(t1, t2); | |
1758 __ Addu(t5, a2, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS))); | |
1759 __ Subu(t5, t5, Operand(t2)); | |
1760 __ LoadRoot(t3, Heap::kTheHoleValueRootIndex); | |
1761 __ Lsa(a1, t0, t1, 1); | |
1762 __ Addu(a1, a1, Operand(kParameterMapHeaderSize)); | |
1763 | |
1764 // a1 = address of backing store (tagged) | |
1765 // t0 = address of parameter map (tagged) | |
1766 // a0 = temporary scratch (a.o., for address calculation) | |
1767 // t1 = loop variable (tagged) | |
1768 // t3 = the hole value | |
1769 __ jmp(¶meters_test); | |
1770 | |
1771 __ bind(¶meters_loop); | |
1772 __ Subu(t1, t1, Operand(Smi::FromInt(1))); | |
1773 __ sll(a0, t1, 1); | |
1774 __ Addu(a0, a0, Operand(kParameterMapHeaderSize - kHeapObjectTag)); | |
1775 __ Addu(t6, t0, a0); | |
1776 __ sw(t5, MemOperand(t6)); | |
1777 __ Subu(a0, a0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize)); | |
1778 __ Addu(t6, a1, a0); | |
1779 __ sw(t3, MemOperand(t6)); | |
1780 __ Addu(t5, t5, Operand(Smi::FromInt(1))); | |
1781 __ bind(¶meters_test); | |
1782 __ Branch(¶meters_loop, ne, t1, Operand(Smi::FromInt(0))); | |
1783 | |
1784 // t1 = argument count (tagged). | |
1785 __ lw(t1, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset)); | |
1786 | |
1787 __ bind(&skip_parameter_map); | |
1788 // v0 = address of new object (tagged) | |
1789 // a1 = address of backing store (tagged) | |
1790 // t1 = argument count (tagged) | |
1791 // t2 = mapped parameter count (tagged) | |
1792 // t5 = scratch | |
1793 // Copy arguments header and remaining slots (if there are any). | |
1794 __ LoadRoot(t5, Heap::kFixedArrayMapRootIndex); | |
1795 __ sw(t5, FieldMemOperand(a1, FixedArray::kMapOffset)); | |
1796 __ sw(t1, FieldMemOperand(a1, FixedArray::kLengthOffset)); | |
1797 | |
1798 Label arguments_loop, arguments_test; | |
1799 __ sll(t6, t2, 1); | |
1800 __ Subu(a3, a3, Operand(t6)); | |
1801 __ jmp(&arguments_test); | |
1802 | |
1803 __ bind(&arguments_loop); | |
1804 __ Subu(a3, a3, Operand(kPointerSize)); | |
1805 __ lw(t0, MemOperand(a3, 0)); | |
1806 __ Lsa(t5, a1, t2, 1); | |
1807 __ sw(t0, FieldMemOperand(t5, FixedArray::kHeaderSize)); | |
1808 __ Addu(t2, t2, Operand(Smi::FromInt(1))); | |
1809 | |
1810 __ bind(&arguments_test); | |
1811 __ Branch(&arguments_loop, lt, t2, Operand(t1)); | |
1812 | |
1813 // Return. | |
1814 __ Ret(); | |
1815 | |
1816 // Do the runtime call to allocate the arguments object. | |
1817 // t1 = argument count (tagged) | |
1818 __ bind(&runtime); | |
1819 __ Push(a1, a3, t1); | |
1820 __ TailCallRuntime(Runtime::kNewSloppyArguments); | |
1821 } | |
1822 | |
1823 | |
1824 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) { | 1594 void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) { |
1825 // Return address is in ra. | 1595 // Return address is in ra. |
1826 Label slow; | 1596 Label slow; |
1827 | 1597 |
1828 Register receiver = LoadDescriptor::ReceiverRegister(); | 1598 Register receiver = LoadDescriptor::ReceiverRegister(); |
1829 Register key = LoadDescriptor::NameRegister(); | 1599 Register key = LoadDescriptor::NameRegister(); |
1830 | 1600 |
1831 // Check that the key is an array index, that is Uint32. | 1601 // Check that the key is an array index, that is Uint32. |
1832 __ And(t0, key, Operand(kSmiTagMask | kSmiSignMask)); | 1602 __ And(t0, key, Operand(kSmiTagMask | kSmiSignMask)); |
1833 __ Branch(&slow, ne, t0, Operand(zero_reg)); | 1603 __ Branch(&slow, ne, t0, Operand(zero_reg)); |
(...skipping 3300 matching lines...)
5134 __ SmiTag(a1); | 4904 __ SmiTag(a1); |
5135 __ Push(a0, a2, a1); | 4905 __ Push(a0, a2, a1); |
5136 __ CallRuntime(Runtime::kAllocateInNewSpace); | 4906 __ CallRuntime(Runtime::kAllocateInNewSpace); |
5137 __ Pop(a0, a2); | 4907 __ Pop(a0, a2); |
5138 } | 4908 } |
5139 __ jmp(&done_allocate); | 4909 __ jmp(&done_allocate); |
5140 } | 4910 } |
5141 } | 4911 } |
5142 | 4912 |
5143 | 4913 |
| 4914 void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) { |
| 4915 // ----------- S t a t e ------------- |
| 4916 // -- a1 : function |
| 4917 // -- cp : context |
| 4918 // -- fp : frame pointer |
| 4919 // -- ra : return address |
| 4920 // ----------------------------------- |
| 4921 __ AssertFunction(a1); |
| 4922 |
| 4923 // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub. |
| 4924 __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset)); |
| 4925 __ lw(a2, |
| 4926 FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset)); |
| 4927 __ Lsa(a3, fp, a2, kPointerSizeLog2 - 1); |
| 4928 __ Addu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset)); |
| 4929 |
| 4930 // a1 : function |
| 4931 // a2 : number of parameters (tagged) |
| 4932 // a3 : parameters pointer |
| 4933 // Registers used over whole function: |
| 4934 // t1 : arguments count (tagged) |
| 4935 // t2 : mapped parameter count (tagged) |
| 4936 |
| 4937 // Check if the calling frame is an arguments adaptor frame. |
| 4938 Label adaptor_frame, try_allocate, runtime; |
| 4939 __ lw(t0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset)); |
| 4940 __ lw(a0, MemOperand(t0, StandardFrameConstants::kContextOffset)); |
| 4941 __ Branch(&adaptor_frame, eq, a0, |
| 4942 Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); |
| 4943 |
| 4944 // No adaptor, parameter count = argument count. |
| 4945 __ mov(t1, a2); |
| 4946 __ Branch(USE_DELAY_SLOT, &try_allocate); |
| 4947 __ mov(t2, a2); // In delay slot. |
| 4948 |
| 4949 // We have an adaptor frame. Patch the parameters pointer. |
| 4950 __ bind(&adaptor_frame); |
| 4951 __ lw(t1, MemOperand(t0, ArgumentsAdaptorFrameConstants::kLengthOffset)); |
| 4952 __ Lsa(t0, t0, t1, 1); |
| 4953 __ Addu(a3, t0, Operand(StandardFrameConstants::kCallerSPOffset)); |
| 4954 |
| 4955 // t1 = argument count (tagged) |
| 4956 // t2 = parameter count (tagged) |
| 4957 // Compute the mapped parameter count = min(t2, t1) in t2. |
| 4958 __ mov(t2, a2); |
| 4959 __ Branch(&try_allocate, le, t2, Operand(t1)); |
| 4960 __ mov(t2, t1); |
| 4961 |
| 4962 __ bind(&try_allocate); |
| 4963 |
| 4964 // Compute the sizes of backing store, parameter map, and arguments object. |
| 4965 // 1. Parameter map, has 2 extra words containing context and backing store. |
| 4966 const int kParameterMapHeaderSize = |
| 4967 FixedArray::kHeaderSize + 2 * kPointerSize; |
| 4968 // If there are no mapped parameters, we do not need the parameter_map. |
| 4969 Label param_map_size; |
| 4970 DCHECK_EQ(static_cast<Smi*>(0), Smi::FromInt(0)); |
| 4971 __ Branch(USE_DELAY_SLOT, ¶m_map_size, eq, t2, Operand(zero_reg)); |
| 4972 __ mov(t5, zero_reg); // In delay slot: param map size = 0 when t2 == 0. |
| 4973 __ sll(t5, t2, 1); |
| 4974 __ addiu(t5, t5, kParameterMapHeaderSize); |
| 4975 __ bind(¶m_map_size); |
| 4976 |
| 4977 // 2. Backing store. |
| 4978 __ Lsa(t5, t5, t1, 1); |
| 4979 __ Addu(t5, t5, Operand(FixedArray::kHeaderSize)); |
| 4980 |
| 4981 // 3. Arguments object. |
| 4982 __ Addu(t5, t5, Operand(JSSloppyArgumentsObject::kSize)); |
| 4983 |
| 4984 // Do the allocation of all three objects in one go. |
| 4985 __ Allocate(t5, v0, t5, t0, &runtime, TAG_OBJECT); |
| 4986 |
| 4987 // v0 = address of new object(s) (tagged) |
| 4988 // a2 = argument count (smi-tagged) |
| 4989 // Get the arguments boilerplate from the current native context into t0. |
| 4990 const int kNormalOffset = |
| 4991 Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX); |
| 4992 const int kAliasedOffset = |
| 4993 Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX); |
| 4994 |
| 4995 __ lw(t0, NativeContextMemOperand()); |
| 4996 Label skip2_ne, skip2_eq; |
| 4997 __ Branch(&skip2_ne, ne, t2, Operand(zero_reg)); |
| 4998 __ lw(t0, MemOperand(t0, kNormalOffset)); |
| 4999 __ bind(&skip2_ne); |
| 5000 |
| 5001 __ Branch(&skip2_eq, eq, t2, Operand(zero_reg)); |
| 5002 __ lw(t0, MemOperand(t0, kAliasedOffset)); |
| 5003 __ bind(&skip2_eq); |
| 5004 |
| 5005 // v0 = address of new object (tagged) |
| 5006 // a2 = argument count (smi-tagged) |
| 5007 // t0 = address of arguments map (tagged) |
| 5008 // t2 = mapped parameter count (tagged) |
| 5009 __ sw(t0, FieldMemOperand(v0, JSObject::kMapOffset)); |
| 5010 __ LoadRoot(t5, Heap::kEmptyFixedArrayRootIndex); |
| 5011 __ sw(t5, FieldMemOperand(v0, JSObject::kPropertiesOffset)); |
| 5012 __ sw(t5, FieldMemOperand(v0, JSObject::kElementsOffset)); |
| 5013 |
| 5014 // Set up the callee in-object property. |
| 5015 __ AssertNotSmi(a1); |
| 5016 __ sw(a1, FieldMemOperand(v0, JSSloppyArgumentsObject::kCalleeOffset)); |
| 5017 |
| 5018 // Use the length (smi tagged) and set that as an in-object property too. |
| 5019 __ AssertSmi(t1); |
| 5020 __ sw(t1, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset)); |
| 5021 |
| 5022 // Set up the elements pointer in the allocated arguments object. |
| 5023 // If we allocated a parameter map, t0 will point there, otherwise |
| 5024 // it will point to the backing store. |
| 5025 __ Addu(t0, v0, Operand(JSSloppyArgumentsObject::kSize)); |
| 5026 __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset)); |
| 5027 |
| 5028 // v0 = address of new object (tagged) |
| 5029 // a2 = argument count (tagged) |
| 5030 // t0 = address of parameter map or backing store (tagged) |
| 5031 // t2 = mapped parameter count (tagged) |
| 5032 // Initialize parameter map. If there are no mapped arguments, we're done. |
| 5033 Label skip_parameter_map; |
| 5034 Label skip3; |
| 5035 __ Branch(&skip3, ne, t2, Operand(Smi::FromInt(0))); |
| 5036 // Move backing store address to a1, because it is |
| 5037 // expected there when filling in the unmapped arguments. |
| 5038 __ mov(a1, t0); |
| 5039 __ bind(&skip3); |
| 5040 |
| 5041 __ Branch(&skip_parameter_map, eq, t2, Operand(Smi::FromInt(0))); |
| 5042 |
| 5043 __ LoadRoot(t1, Heap::kSloppyArgumentsElementsMapRootIndex); |
| 5044 __ sw(t1, FieldMemOperand(t0, FixedArray::kMapOffset)); |
| 5045 __ Addu(t1, t2, Operand(Smi::FromInt(2))); |
| 5046 __ sw(t1, FieldMemOperand(t0, FixedArray::kLengthOffset)); |
| 5047 __ sw(cp, FieldMemOperand(t0, FixedArray::kHeaderSize + 0 * kPointerSize)); |
| 5048 __ Lsa(t1, t0, t2, 1); |
| 5049 __ Addu(t1, t1, Operand(kParameterMapHeaderSize)); |
| 5050 __ sw(t1, FieldMemOperand(t0, FixedArray::kHeaderSize + 1 * kPointerSize)); |
| 5051 |
| 5052 // Copy the parameter slots and the holes in the arguments. |
| 5053 // We need to fill in mapped_parameter_count slots. They index the context, |
| 5054 // where parameters are stored in reverse order, at |
| 5055 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1 |
| 5056 // The mapped parameters thus need to get indices |
| 5057 // MIN_CONTEXT_SLOTS+parameter_count-1 .. |
| 5058 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count |
| 5059 // We loop from right to left. |
| 5060 Label parameters_loop, parameters_test; |
| 5061 __ mov(t1, t2); |
| 5062 __ Addu(t5, a2, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS))); |
| 5063 __ Subu(t5, t5, Operand(t2)); |
| 5064 __ LoadRoot(t3, Heap::kTheHoleValueRootIndex); |
| 5065 __ Lsa(a1, t0, t1, 1); |
| 5066 __ Addu(a1, a1, Operand(kParameterMapHeaderSize)); |
| 5067 |
| 5068 // a1 = address of backing store (tagged) |
| 5069 // t0 = address of parameter map (tagged) |
| 5070 // a0 = temporary scratch (a.o., for address calculation) |
| 5071 // t1 = loop variable (tagged) |
| 5072 // t3 = the hole value |
| 5073 __ jmp(¶meters_test); |
| 5074 |
| 5075 __ bind(¶meters_loop); |
| 5076 __ Subu(t1, t1, Operand(Smi::FromInt(1))); |
| 5077 __ sll(a0, t1, 1); |
| 5078 __ Addu(a0, a0, Operand(kParameterMapHeaderSize - kHeapObjectTag)); |
| 5079 __ Addu(t6, t0, a0); |
| 5080 __ sw(t5, MemOperand(t6)); |
| 5081 __ Subu(a0, a0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize)); |
| 5082 __ Addu(t6, a1, a0); |
| 5083 __ sw(t3, MemOperand(t6)); |
| 5084 __ Addu(t5, t5, Operand(Smi::FromInt(1))); |
| 5085 __ bind(¶meters_test); |
| 5086 __ Branch(¶meters_loop, ne, t1, Operand(Smi::FromInt(0))); |
| 5087 |
| 5088 // t1 = argument count (tagged). |
| 5089 __ lw(t1, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset)); |
| 5090 |
| 5091 __ bind(&skip_parameter_map); |
| 5092 // v0 = address of new object (tagged) |
| 5093 // a1 = address of backing store (tagged) |
| 5094 // t1 = argument count (tagged) |
| 5095 // t2 = mapped parameter count (tagged) |
| 5096 // t5 = scratch |
| 5097 // Copy arguments header and remaining slots (if there are any). |
| 5098 __ LoadRoot(t5, Heap::kFixedArrayMapRootIndex); |
| 5099 __ sw(t5, FieldMemOperand(a1, FixedArray::kMapOffset)); |
| 5100 __ sw(t1, FieldMemOperand(a1, FixedArray::kLengthOffset)); |
| 5101 |
| 5102 Label arguments_loop, arguments_test; |
| 5103 __ sll(t6, t2, 1); |
| 5104 __ Subu(a3, a3, Operand(t6)); |
| 5105 __ jmp(&arguments_test); |
| 5106 |
| 5107 __ bind(&arguments_loop); |
| 5108 __ Subu(a3, a3, Operand(kPointerSize)); |
| 5109 __ lw(t0, MemOperand(a3, 0)); |
| 5110 __ Lsa(t5, a1, t2, 1); |
| 5111 __ sw(t0, FieldMemOperand(t5, FixedArray::kHeaderSize)); |
| 5112 __ Addu(t2, t2, Operand(Smi::FromInt(1))); |
| 5113 |
| 5114 __ bind(&arguments_test); |
| 5115 __ Branch(&arguments_loop, lt, t2, Operand(t1)); |
| 5116 |
| 5117 // Return. |
| 5118 __ Ret(); |
| 5119 |
| 5120 // Do the runtime call to allocate the arguments object. |
| 5121 // t1 = argument count (tagged) |
| 5122 __ bind(&runtime); |
| 5123 __ Push(a1, a3, t1); |
| 5124 __ TailCallRuntime(Runtime::kNewSloppyArguments); |
| 5125 } |
| 5126 |
| 5127 |
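
Editor's note on the new FastNewSloppyArgumentsStub above: the size handed to Allocate() and the slot indices written by parameters_loop follow a small amount of arithmetic that is easier to read in plain C++. The sketch below is only an illustrative model under assumed 32-bit constants (kPointerSize == 4, a two-word FixedArray header, a five-word JSSloppyArgumentsObject); it is not code from this patch, and the helper names are made up.

#include <algorithm>
#include <vector>

// Assumed MIPS32-style constants; illustrative only, not the real V8 headers.
constexpr int kPointerSize = 4;
constexpr int kFixedArrayHeaderSize = 2 * kPointerSize;        // map + length
constexpr int kParameterMapHeaderSize =
    kFixedArrayHeaderSize + 2 * kPointerSize;                  // + context + backing store
constexpr int kJSSloppyArgumentsObjectSize = 5 * kPointerSize; // assumed object size

// Total bytes requested from Allocate(): optional parameter map, backing
// store for all arguments, and the arguments object itself.
int SloppyArgumentsAllocationSize(int argument_count, int parameter_count) {
  int mapped_count = std::min(parameter_count, argument_count);
  int parameter_map =
      mapped_count == 0 ? 0
                        : kParameterMapHeaderSize + mapped_count * kPointerSize;
  int backing_store = kFixedArrayHeaderSize + argument_count * kPointerSize;
  return parameter_map + backing_store + kJSSloppyArgumentsObjectSize;
}

// Index mapping established by parameters_loop: slot i of the parameter map
// aliases context slot MIN_CONTEXT_SLOTS + parameter_count - 1 - i, while the
// matching backing-store slot holds the hole so lookups fall through to the
// context.
std::vector<int> ParameterMapIndices(int argument_count, int parameter_count,
                                     int min_context_slots) {
  int mapped_count = std::min(parameter_count, argument_count);
  std::vector<int> map(mapped_count);
  for (int i = 0; i < mapped_count; ++i) {
    map[i] = min_context_slots + parameter_count - 1 - i;
  }
  return map;
}
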
5144 void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { | 5128 void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) { |
5145 // ----------- S t a t e ------------- | 5129 // ----------- S t a t e ------------- |
5146 // -- a1 : function | 5130 // -- a1 : function |
5147 // -- cp : context | 5131 // -- cp : context |
5148 // -- fp : frame pointer | 5132 // -- fp : frame pointer |
5149 // -- ra : return address | 5133 // -- ra : return address |
5150 // ----------------------------------- | 5134 // ----------------------------------- |
5151 __ AssertFunction(a1); | 5135 __ AssertFunction(a1); |
5152 | 5136 |
5153 // For Ignition we need to skip all possible handler/stub frames until | 5137 // For Ignition we need to skip all possible handler/stub frames until |
(...skipping 545 matching lines...)
5699 return_value_operand, NULL); | 5683 return_value_operand, NULL); |
5700 } | 5684 } |
5701 | 5685 |
5702 | 5686 |
5703 #undef __ | 5687 #undef __ |
5704 | 5688 |
5705 } // namespace internal | 5689 } // namespace internal |
5706 } // namespace v8 | 5690 } // namespace v8 |
5707 | 5691 |
5708 #endif // V8_TARGET_ARCH_MIPS | 5692 #endif // V8_TARGET_ARCH_MIPS |